[thirdparty/gcc.git] gcc/tree.c  (blob fd0e6921bfd754c0cf9e6bd01f39b432166a6dfa)
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65
66 /* Tree code classes. */
67
68 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
69 #define END_OF_BASE_TREE_CODES tcc_exceptional,
70
71 const enum tree_code_class tree_code_type[] = {
72 #include "all-tree.def"
73 };
74
75 #undef DEFTREECODE
76 #undef END_OF_BASE_TREE_CODES
77
78 /* Table indexed by tree code giving number of expression
79 operands beyond the fixed part of the node structure.
80 Not used for types or decls. */
81
82 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
83 #define END_OF_BASE_TREE_CODES 0,
84
85 const unsigned char tree_code_length[] = {
86 #include "all-tree.def"
87 };
88
89 #undef DEFTREECODE
90 #undef END_OF_BASE_TREE_CODES
91
92 /* Names of tree components.
93 Used for printing out the tree and error messages. */
94 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
95 #define END_OF_BASE_TREE_CODES "@dummy",
96
97 static const char *const tree_code_name[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Each tree code class has an associated string representation.
105 These must correspond to the tree_code_class entries. */
106
107 const char *const tree_code_class_strings[] =
108 {
109 "exceptional",
110 "constant",
111 "type",
112 "declaration",
113 "reference",
114 "comparison",
115 "unary",
116 "binary",
117 "statement",
118 "vl_exp",
119 "expression"
120 };
121
122 /* obstack.[ch] explicitly declined to prototype this. */
123 extern int _obstack_allocated_p (struct obstack *h, void *obj);
124
125 /* Statistics-gathering stuff. */
126
127 static int tree_code_counts[MAX_TREE_CODES];
128 int tree_node_counts[(int) all_kinds];
129 int tree_node_sizes[(int) all_kinds];
130
131 /* Keep in sync with tree.h:enum tree_node_kind. */
132 static const char * const tree_node_kind_names[] = {
133 "decls",
134 "types",
135 "blocks",
136 "stmts",
137 "refs",
138 "exprs",
139 "constants",
140 "identifiers",
141 "vecs",
142 "binfos",
143 "ssa names",
144 "constructors",
145 "random kinds",
146 "lang_decl kinds",
147 "lang_type kinds",
148 "omp clauses",
149 };
150
151 /* Unique id for next decl created. */
152 static GTY(()) int next_decl_uid;
153 /* Unique id for next type created. */
154 static GTY(()) int next_type_uid = 1;
155 /* Unique id for next debug decl created. Use negative numbers,
156 to catch erroneous uses. */
157 static GTY(()) int next_debug_decl_uid;
158
159 /* Since we cannot rehash a type after it is in the table, we have to
160 keep the hash code. */
161
162 struct GTY((for_user)) type_hash {
163 unsigned long hash;
164 tree type;
165 };
166
167 /* Initial size of the hash table (rounded to next prime). */
168 #define TYPE_HASH_INITIAL_SIZE 1000
169
170 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
171 {
172 static hashval_t hash (type_hash *t) { return t->hash; }
173 static bool equal (type_hash *a, type_hash *b);
174
175 static int
176 keep_cache_entry (type_hash *&t)
177 {
178 return ggc_marked_p (t->type);
179 }
180 };
181
182 /* Now here is the hash table. When recording a type, it is added to
183 the slot whose index is the hash code. Note that the hash table is
184 used for several kinds of types (function types, array types and
185 array index range types, for now). While all these live in the
186 same table, they are completely independent, and the hash code is
187 computed differently for each of these. */
188
189 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
190
191 /* Hash table and temporary node for larger integer const values. */
192 static GTY (()) tree int_cst_node;
193
194 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
195 {
196 static hashval_t hash (tree t);
197 static bool equal (tree x, tree y);
198 };
199
200 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
201
202 /* Hash table for optimization flags and target option flags. Use the same
203 hash table for both sets of options. Nodes for building the current
204 optimization and target option nodes.  The assumption is that most of the
205 time the options created will already be in the hash table, so we avoid
206 allocating and freeing up a node repeatedly.  */
207 static GTY (()) tree cl_optimization_node;
208 static GTY (()) tree cl_target_option_node;
209
210 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
211 {
212 static hashval_t hash (tree t);
213 static bool equal (tree x, tree y);
214 };
215
216 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
217
218 /* General tree->tree mapping structure for use in hash tables. */
219
220
221 static GTY ((cache))
222 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
223
224 static GTY ((cache))
225 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
226
227 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
228 {
229 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
230
231 static bool
232 equal (tree_vec_map *a, tree_vec_map *b)
233 {
234 return a->base.from == b->base.from;
235 }
236
237 static int
238 keep_cache_entry (tree_vec_map *&m)
239 {
240 return ggc_marked_p (m->base.from);
241 }
242 };
243
244 static GTY ((cache))
245 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
246
247 static void set_type_quals (tree, int);
248 static void print_type_hash_statistics (void);
249 static void print_debug_expr_statistics (void);
250 static void print_value_expr_statistics (void);
251 static void type_hash_list (const_tree, inchash::hash &);
252 static void attribute_hash_list (const_tree, inchash::hash &);
253
254 tree global_trees[TI_MAX];
255 tree integer_types[itk_none];
256
257 bool int_n_enabled_p[NUM_INT_N_ENTS];
258 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
259
260 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
261
262 /* Number of operands for each OpenMP clause. */
263 unsigned const char omp_clause_num_ops[] =
264 {
265 0, /* OMP_CLAUSE_ERROR */
266 1, /* OMP_CLAUSE_PRIVATE */
267 1, /* OMP_CLAUSE_SHARED */
268 1, /* OMP_CLAUSE_FIRSTPRIVATE */
269 2, /* OMP_CLAUSE_LASTPRIVATE */
270 5, /* OMP_CLAUSE_REDUCTION */
271 1, /* OMP_CLAUSE_COPYIN */
272 1, /* OMP_CLAUSE_COPYPRIVATE */
273 3, /* OMP_CLAUSE_LINEAR */
274 2, /* OMP_CLAUSE_ALIGNED */
275 1, /* OMP_CLAUSE_DEPEND */
276 1, /* OMP_CLAUSE_UNIFORM */
277 1, /* OMP_CLAUSE_TO_DECLARE */
278 1, /* OMP_CLAUSE_LINK */
279 2, /* OMP_CLAUSE_FROM */
280 2, /* OMP_CLAUSE_TO */
281 2, /* OMP_CLAUSE_MAP */
282 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
283 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
284 2, /* OMP_CLAUSE__CACHE_ */
285 2, /* OMP_CLAUSE_GANG */
286 1, /* OMP_CLAUSE_ASYNC */
287 1, /* OMP_CLAUSE_WAIT */
288 0, /* OMP_CLAUSE_AUTO */
289 0, /* OMP_CLAUSE_SEQ */
290 1, /* OMP_CLAUSE__LOOPTEMP_ */
291 1, /* OMP_CLAUSE_IF */
292 1, /* OMP_CLAUSE_NUM_THREADS */
293 1, /* OMP_CLAUSE_SCHEDULE */
294 0, /* OMP_CLAUSE_NOWAIT */
295 1, /* OMP_CLAUSE_ORDERED */
296 0, /* OMP_CLAUSE_DEFAULT */
297 3, /* OMP_CLAUSE_COLLAPSE */
298 0, /* OMP_CLAUSE_UNTIED */
299 1, /* OMP_CLAUSE_FINAL */
300 0, /* OMP_CLAUSE_MERGEABLE */
301 1, /* OMP_CLAUSE_DEVICE */
302 1, /* OMP_CLAUSE_DIST_SCHEDULE */
303 0, /* OMP_CLAUSE_INBRANCH */
304 0, /* OMP_CLAUSE_NOTINBRANCH */
305 1, /* OMP_CLAUSE_NUM_TEAMS */
306 1, /* OMP_CLAUSE_THREAD_LIMIT */
307 0, /* OMP_CLAUSE_PROC_BIND */
308 1, /* OMP_CLAUSE_SAFELEN */
309 1, /* OMP_CLAUSE_SIMDLEN */
310 0, /* OMP_CLAUSE_FOR */
311 0, /* OMP_CLAUSE_PARALLEL */
312 0, /* OMP_CLAUSE_SECTIONS */
313 0, /* OMP_CLAUSE_TASKGROUP */
314 1, /* OMP_CLAUSE_PRIORITY */
315 1, /* OMP_CLAUSE_GRAINSIZE */
316 1, /* OMP_CLAUSE_NUM_TASKS */
317 0, /* OMP_CLAUSE_NOGROUP */
318 0, /* OMP_CLAUSE_THREADS */
319 0, /* OMP_CLAUSE_SIMD */
320 1, /* OMP_CLAUSE_HINT */
321 0, /* OMP_CLAUSE_DEFAULTMAP */
322 1, /* OMP_CLAUSE__SIMDUID_ */
323 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
324 0, /* OMP_CLAUSE_INDEPENDENT */
325 1, /* OMP_CLAUSE_WORKER */
326 1, /* OMP_CLAUSE_VECTOR */
327 1, /* OMP_CLAUSE_NUM_GANGS */
328 1, /* OMP_CLAUSE_NUM_WORKERS */
329 1, /* OMP_CLAUSE_VECTOR_LENGTH */
330 1, /* OMP_CLAUSE_TILE */
331 2, /* OMP_CLAUSE__GRIDDIM_ */
332 };
333
334 const char * const omp_clause_code_name[] =
335 {
336 "error_clause",
337 "private",
338 "shared",
339 "firstprivate",
340 "lastprivate",
341 "reduction",
342 "copyin",
343 "copyprivate",
344 "linear",
345 "aligned",
346 "depend",
347 "uniform",
348 "to",
349 "link",
350 "from",
351 "to",
352 "map",
353 "use_device_ptr",
354 "is_device_ptr",
355 "_cache_",
356 "gang",
357 "async",
358 "wait",
359 "auto",
360 "seq",
361 "_looptemp_",
362 "if",
363 "num_threads",
364 "schedule",
365 "nowait",
366 "ordered",
367 "default",
368 "collapse",
369 "untied",
370 "final",
371 "mergeable",
372 "device",
373 "dist_schedule",
374 "inbranch",
375 "notinbranch",
376 "num_teams",
377 "thread_limit",
378 "proc_bind",
379 "safelen",
380 "simdlen",
381 "for",
382 "parallel",
383 "sections",
384 "taskgroup",
385 "priority",
386 "grainsize",
387 "num_tasks",
388 "nogroup",
389 "threads",
390 "simd",
391 "hint",
392 "defaultmap",
393 "_simduid_",
394 "_Cilk_for_count_",
395 "independent",
396 "worker",
397 "vector",
398 "num_gangs",
399 "num_workers",
400 "vector_length",
401 "tile",
402 "_griddim_"
403 };
404
405
406 /* Return the tree node structure used by tree code CODE. */
407
408 static inline enum tree_node_structure_enum
409 tree_node_structure_for_code (enum tree_code code)
410 {
411 switch (TREE_CODE_CLASS (code))
412 {
413 case tcc_declaration:
414 {
415 switch (code)
416 {
417 case FIELD_DECL:
418 return TS_FIELD_DECL;
419 case PARM_DECL:
420 return TS_PARM_DECL;
421 case VAR_DECL:
422 return TS_VAR_DECL;
423 case LABEL_DECL:
424 return TS_LABEL_DECL;
425 case RESULT_DECL:
426 return TS_RESULT_DECL;
427 case DEBUG_EXPR_DECL:
428 return TS_DECL_WRTL;
429 case CONST_DECL:
430 return TS_CONST_DECL;
431 case TYPE_DECL:
432 return TS_TYPE_DECL;
433 case FUNCTION_DECL:
434 return TS_FUNCTION_DECL;
435 case TRANSLATION_UNIT_DECL:
436 return TS_TRANSLATION_UNIT_DECL;
437 default:
438 return TS_DECL_NON_COMMON;
439 }
440 }
441 case tcc_type:
442 return TS_TYPE_NON_COMMON;
443 case tcc_reference:
444 case tcc_comparison:
445 case tcc_unary:
446 case tcc_binary:
447 case tcc_expression:
448 case tcc_statement:
449 case tcc_vl_exp:
450 return TS_EXP;
451 default: /* tcc_constant and tcc_exceptional */
452 break;
453 }
454 switch (code)
455 {
456 /* tcc_constant cases. */
457 case VOID_CST: return TS_TYPED;
458 case INTEGER_CST: return TS_INT_CST;
459 case REAL_CST: return TS_REAL_CST;
460 case FIXED_CST: return TS_FIXED_CST;
461 case COMPLEX_CST: return TS_COMPLEX;
462 case VECTOR_CST: return TS_VECTOR;
463 case STRING_CST: return TS_STRING;
464 /* tcc_exceptional cases. */
465 case ERROR_MARK: return TS_COMMON;
466 case IDENTIFIER_NODE: return TS_IDENTIFIER;
467 case TREE_LIST: return TS_LIST;
468 case TREE_VEC: return TS_VEC;
469 case SSA_NAME: return TS_SSA_NAME;
470 case PLACEHOLDER_EXPR: return TS_COMMON;
471 case STATEMENT_LIST: return TS_STATEMENT_LIST;
472 case BLOCK: return TS_BLOCK;
473 case CONSTRUCTOR: return TS_CONSTRUCTOR;
474 case TREE_BINFO: return TS_BINFO;
475 case OMP_CLAUSE: return TS_OMP_CLAUSE;
476 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
477 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
478
479 default:
480 gcc_unreachable ();
481 }
482 }
483
484
485 /* Initialize tree_contains_struct to describe the hierarchy of tree
486 nodes. */
487
488 static void
489 initialize_tree_contains_struct (void)
490 {
491 unsigned i;
492
493 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
494 {
495 enum tree_code code;
496 enum tree_node_structure_enum ts_code;
497
498 code = (enum tree_code) i;
499 ts_code = tree_node_structure_for_code (code);
500
501 /* Mark the TS structure itself. */
502 tree_contains_struct[code][ts_code] = 1;
503
504 /* Mark all the structures that TS is derived from. */
505 switch (ts_code)
506 {
507 case TS_TYPED:
508 case TS_BLOCK:
509 MARK_TS_BASE (code);
510 break;
511
512 case TS_COMMON:
513 case TS_INT_CST:
514 case TS_REAL_CST:
515 case TS_FIXED_CST:
516 case TS_VECTOR:
517 case TS_STRING:
518 case TS_COMPLEX:
519 case TS_SSA_NAME:
520 case TS_CONSTRUCTOR:
521 case TS_EXP:
522 case TS_STATEMENT_LIST:
523 MARK_TS_TYPED (code);
524 break;
525
526 case TS_IDENTIFIER:
527 case TS_DECL_MINIMAL:
528 case TS_TYPE_COMMON:
529 case TS_LIST:
530 case TS_VEC:
531 case TS_BINFO:
532 case TS_OMP_CLAUSE:
533 case TS_OPTIMIZATION:
534 case TS_TARGET_OPTION:
535 MARK_TS_COMMON (code);
536 break;
537
538 case TS_TYPE_WITH_LANG_SPECIFIC:
539 MARK_TS_TYPE_COMMON (code);
540 break;
541
542 case TS_TYPE_NON_COMMON:
543 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
544 break;
545
546 case TS_DECL_COMMON:
547 MARK_TS_DECL_MINIMAL (code);
548 break;
549
550 case TS_DECL_WRTL:
551 case TS_CONST_DECL:
552 MARK_TS_DECL_COMMON (code);
553 break;
554
555 case TS_DECL_NON_COMMON:
556 MARK_TS_DECL_WITH_VIS (code);
557 break;
558
559 case TS_DECL_WITH_VIS:
560 case TS_PARM_DECL:
561 case TS_LABEL_DECL:
562 case TS_RESULT_DECL:
563 MARK_TS_DECL_WRTL (code);
564 break;
565
566 case TS_FIELD_DECL:
567 MARK_TS_DECL_COMMON (code);
568 break;
569
570 case TS_VAR_DECL:
571 MARK_TS_DECL_WITH_VIS (code);
572 break;
573
574 case TS_TYPE_DECL:
575 case TS_FUNCTION_DECL:
576 MARK_TS_DECL_NON_COMMON (code);
577 break;
578
579 case TS_TRANSLATION_UNIT_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 default:
584 gcc_unreachable ();
585 }
586 }
587
588 /* Basic consistency checks for attributes used in fold. */
589 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
590 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
592 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
601 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
618 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
622 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
624 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
625 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
629 }
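
/* A minimal usage sketch (hypothetical helper, not used elsewhere in this
   file) of the table built above: accessors are only valid on codes whose
   structure chain includes the corresponding TS_* structure, which
   CODE_CONTAINS_STRUCT queries.  */

static bool ATTRIBUTE_UNUSED
example_code_has_decl_name (enum tree_code code)
{
  /* DECL_NAME lives in tree_decl_minimal, so every decl code marked with
     TS_DECL_MINIMAL by the hierarchy above qualifies.  */
  return CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL);
}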
630
631
632 /* Init tree.c. */
633
634 void
635 init_ttree (void)
636 {
637 /* Initialize the hash table of types. */
638 type_hash_table
639 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
640
641 debug_expr_for_decl
642 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
643
644 value_expr_for_decl
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
646
647 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
648
649 int_cst_node = make_int_cst (1, 1);
650
651 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
652
653 cl_optimization_node = make_node (OPTIMIZATION_NODE);
654 cl_target_option_node = make_node (TARGET_OPTION_NODE);
655
656 /* Initialize the tree_contains_struct array. */
657 initialize_tree_contains_struct ();
658 lang_hooks.init_ts ();
659 }
660
661 \f
662 /* The name of the object as the assembler will see it (but before any
663 translations made by ASM_OUTPUT_LABELREF). Often this is the same
664 as DECL_NAME. It is an IDENTIFIER_NODE. */
665 tree
666 decl_assembler_name (tree decl)
667 {
668 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
669 lang_hooks.set_decl_assembler_name (decl);
670 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
671 }
672
673 /* When the target supports COMDAT groups, this indicates which group the
674 DECL is associated with. This can be either an IDENTIFIER_NODE or a
675 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
676 tree
677 decl_comdat_group (const_tree node)
678 {
679 struct symtab_node *snode = symtab_node::get (node);
680 if (!snode)
681 return NULL;
682 return snode->get_comdat_group ();
683 }
684
685 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
686 tree
687 decl_comdat_group_id (const_tree node)
688 {
689 struct symtab_node *snode = symtab_node::get (node);
690 if (!snode)
691 return NULL;
692 return snode->get_comdat_group_id ();
693 }
694
695 /* When the target supports named sections, return the section name of NODE
696 as a string, or NULL if it is in no section.  */
697 const char *
698 decl_section_name (const_tree node)
699 {
700 struct symtab_node *snode = symtab_node::get (node);
701 if (!snode)
702 return NULL;
703 return snode->get_section ();
704 }
705
706 /* Set the section name of NODE to the string VALUE, or clear it when
707 VALUE is NULL.  */
708 void
709 set_decl_section_name (tree node, const char *value)
710 {
711 struct symtab_node *snode;
712
713 if (value == NULL)
714 {
715 snode = symtab_node::get (node);
716 if (!snode)
717 return;
718 }
719 else if (TREE_CODE (node) == VAR_DECL)
720 snode = varpool_node::get_create (node);
721 else
722 snode = cgraph_node::get_create (node);
723 snode->set_section (value);
724 }
725
726 /* Return TLS model of a variable NODE. */
727 enum tls_model
728 decl_tls_model (const_tree node)
729 {
730 struct varpool_node *snode = varpool_node::get (node);
731 if (!snode)
732 return TLS_MODEL_NONE;
733 return snode->tls_model;
734 }
735
736 /* Set TLS model of variable NODE to MODEL. */
737 void
738 set_decl_tls_model (tree node, enum tls_model model)
739 {
740 struct varpool_node *vnode;
741
742 if (model == TLS_MODEL_NONE)
743 {
744 vnode = varpool_node::get (node);
745 if (!vnode)
746 return;
747 }
748 else
749 vnode = varpool_node::get_create (node);
750 vnode->tls_model = model;
751 }
752
753 /* Compute the number of bytes occupied by a tree with code CODE.
754 This function cannot be used for nodes that have variable sizes,
755 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
756 size_t
757 tree_code_size (enum tree_code code)
758 {
759 switch (TREE_CODE_CLASS (code))
760 {
761 case tcc_declaration: /* A decl node */
762 {
763 switch (code)
764 {
765 case FIELD_DECL:
766 return sizeof (struct tree_field_decl);
767 case PARM_DECL:
768 return sizeof (struct tree_parm_decl);
769 case VAR_DECL:
770 return sizeof (struct tree_var_decl);
771 case LABEL_DECL:
772 return sizeof (struct tree_label_decl);
773 case RESULT_DECL:
774 return sizeof (struct tree_result_decl);
775 case CONST_DECL:
776 return sizeof (struct tree_const_decl);
777 case TYPE_DECL:
778 return sizeof (struct tree_type_decl);
779 case FUNCTION_DECL:
780 return sizeof (struct tree_function_decl);
781 case DEBUG_EXPR_DECL:
782 return sizeof (struct tree_decl_with_rtl);
783 case TRANSLATION_UNIT_DECL:
784 return sizeof (struct tree_translation_unit_decl);
785 case NAMESPACE_DECL:
786 case IMPORTED_DECL:
787 case NAMELIST_DECL:
788 return sizeof (struct tree_decl_non_common);
789 default:
790 return lang_hooks.tree_size (code);
791 }
792 }
793
794 case tcc_type: /* a type node */
795 return sizeof (struct tree_type_non_common);
796
797 case tcc_reference: /* a reference */
798 case tcc_expression: /* an expression */
799 case tcc_statement: /* an expression with side effects */
800 case tcc_comparison: /* a comparison expression */
801 case tcc_unary: /* a unary arithmetic expression */
802 case tcc_binary: /* a binary arithmetic expression */
803 return (sizeof (struct tree_exp)
804 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
805
806 case tcc_constant: /* a constant */
807 switch (code)
808 {
809 case VOID_CST: return sizeof (struct tree_typed);
810 case INTEGER_CST: gcc_unreachable ();
811 case REAL_CST: return sizeof (struct tree_real_cst);
812 case FIXED_CST: return sizeof (struct tree_fixed_cst);
813 case COMPLEX_CST: return sizeof (struct tree_complex);
814 case VECTOR_CST: return sizeof (struct tree_vector);
815 case STRING_CST: gcc_unreachable ();
816 default:
817 return lang_hooks.tree_size (code);
818 }
819
820 case tcc_exceptional: /* something random, like an identifier. */
821 switch (code)
822 {
823 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
824 case TREE_LIST: return sizeof (struct tree_list);
825
826 case ERROR_MARK:
827 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
828
829 case TREE_VEC:
830 case OMP_CLAUSE: gcc_unreachable ();
831
832 case SSA_NAME: return sizeof (struct tree_ssa_name);
833
834 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
835 case BLOCK: return sizeof (struct tree_block);
836 case CONSTRUCTOR: return sizeof (struct tree_constructor);
837 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
838 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
839
840 default:
841 return lang_hooks.tree_size (code);
842 }
843
844 default:
845 gcc_unreachable ();
846 }
847 }
848
849 /* Compute the number of bytes occupied by NODE. This routine only
850 looks at TREE_CODE, except for those nodes that have variable sizes. */
851 size_t
852 tree_size (const_tree node)
853 {
854 const enum tree_code code = TREE_CODE (node);
855 switch (code)
856 {
857 case INTEGER_CST:
858 return (sizeof (struct tree_int_cst)
859 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
860
861 case TREE_BINFO:
862 return (offsetof (struct tree_binfo, base_binfos)
863 + vec<tree, va_gc>
864 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
865
866 case TREE_VEC:
867 return (sizeof (struct tree_vec)
868 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
869
870 case VECTOR_CST:
871 return (sizeof (struct tree_vector)
872 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
873
874 case STRING_CST:
875 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
876
877 case OMP_CLAUSE:
878 return (sizeof (struct tree_omp_clause)
879 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
880 * sizeof (tree));
881
882 default:
883 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
884 return (sizeof (struct tree_exp)
885 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
886 else
887 return tree_code_size (code);
888 }
889 }
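
/* Illustrative sketch (hypothetical helper, for exposition only):
   fixed-size codes can be sized from the code alone, while variable-sized
   nodes such as INTEGER_CST or TREE_VEC must be sized from the node itself
   via tree_size.  */

static size_t ATTRIBUTE_UNUSED
example_node_size (const_tree node)
{
  enum tree_code code = TREE_CODE (node);
  /* tree_code_size would hit gcc_unreachable for INTEGER_CST, TREE_VEC,
     STRING_CST and OMP_CLAUSE; tree_size handles every code.  */
  if (code == REAL_CST)
    return tree_code_size (code);
  return tree_size (node);
}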
890
891 /* Record interesting allocation statistics for a tree node with CODE
892 and LENGTH. */
893
894 static void
895 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
896 size_t length ATTRIBUTE_UNUSED)
897 {
898 enum tree_code_class type = TREE_CODE_CLASS (code);
899 tree_node_kind kind;
900
901 if (!GATHER_STATISTICS)
902 return;
903
904 switch (type)
905 {
906 case tcc_declaration: /* A decl node */
907 kind = d_kind;
908 break;
909
910 case tcc_type: /* a type node */
911 kind = t_kind;
912 break;
913
914 case tcc_statement: /* an expression with side effects */
915 kind = s_kind;
916 break;
917
918 case tcc_reference: /* a reference */
919 kind = r_kind;
920 break;
921
922 case tcc_expression: /* an expression */
923 case tcc_comparison: /* a comparison expression */
924 case tcc_unary: /* a unary arithmetic expression */
925 case tcc_binary: /* a binary arithmetic expression */
926 kind = e_kind;
927 break;
928
929 case tcc_constant: /* a constant */
930 kind = c_kind;
931 break;
932
933 case tcc_exceptional: /* something random, like an identifier. */
934 switch (code)
935 {
936 case IDENTIFIER_NODE:
937 kind = id_kind;
938 break;
939
940 case TREE_VEC:
941 kind = vec_kind;
942 break;
943
944 case TREE_BINFO:
945 kind = binfo_kind;
946 break;
947
948 case SSA_NAME:
949 kind = ssa_name_kind;
950 break;
951
952 case BLOCK:
953 kind = b_kind;
954 break;
955
956 case CONSTRUCTOR:
957 kind = constr_kind;
958 break;
959
960 case OMP_CLAUSE:
961 kind = omp_clause_kind;
962 break;
963
964 default:
965 kind = x_kind;
966 break;
967 }
968 break;
969
970 case tcc_vl_exp:
971 kind = e_kind;
972 break;
973
974 default:
975 gcc_unreachable ();
976 }
977
978 tree_code_counts[(int) code]++;
979 tree_node_counts[(int) kind]++;
980 tree_node_sizes[(int) kind] += length;
981 }
982
983 /* Allocate and return a new UID from the DECL_UID namespace. */
984
985 int
986 allocate_decl_uid (void)
987 {
988 return next_decl_uid++;
989 }
990
991 /* Return a newly allocated node of code CODE. For decl and type
992 nodes, some other fields are initialized. The rest of the node is
993 initialized to zero. This function cannot be used for TREE_VEC,
994 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
995 tree_code_size.
996
997 Achoo! I got a code in the node. */
998
999 tree
1000 make_node_stat (enum tree_code code MEM_STAT_DECL)
1001 {
1002 tree t;
1003 enum tree_code_class type = TREE_CODE_CLASS (code);
1004 size_t length = tree_code_size (code);
1005
1006 record_node_allocation_statistics (code, length);
1007
1008 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1009 TREE_SET_CODE (t, code);
1010
1011 switch (type)
1012 {
1013 case tcc_statement:
1014 TREE_SIDE_EFFECTS (t) = 1;
1015 break;
1016
1017 case tcc_declaration:
1018 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1019 {
1020 if (code == FUNCTION_DECL)
1021 {
1022 SET_DECL_ALIGN (t, FUNCTION_BOUNDARY);
1023 DECL_MODE (t) = FUNCTION_MODE;
1024 }
1025 else
1026 SET_DECL_ALIGN (t, 1);
1027 }
1028 DECL_SOURCE_LOCATION (t) = input_location;
1029 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1030 DECL_UID (t) = --next_debug_decl_uid;
1031 else
1032 {
1033 DECL_UID (t) = allocate_decl_uid ();
1034 SET_DECL_PT_UID (t, -1);
1035 }
1036 if (TREE_CODE (t) == LABEL_DECL)
1037 LABEL_DECL_UID (t) = -1;
1038
1039 break;
1040
1041 case tcc_type:
1042 TYPE_UID (t) = next_type_uid++;
1043 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1044 TYPE_USER_ALIGN (t) = 0;
1045 TYPE_MAIN_VARIANT (t) = t;
1046 TYPE_CANONICAL (t) = t;
1047
1048 /* Default to no attributes for type, but let target change that. */
1049 TYPE_ATTRIBUTES (t) = NULL_TREE;
1050 targetm.set_default_type_attributes (t);
1051
1052 /* We have not yet computed the alias set for this type. */
1053 TYPE_ALIAS_SET (t) = -1;
1054 break;
1055
1056 case tcc_constant:
1057 TREE_CONSTANT (t) = 1;
1058 break;
1059
1060 case tcc_expression:
1061 switch (code)
1062 {
1063 case INIT_EXPR:
1064 case MODIFY_EXPR:
1065 case VA_ARG_EXPR:
1066 case PREDECREMENT_EXPR:
1067 case PREINCREMENT_EXPR:
1068 case POSTDECREMENT_EXPR:
1069 case POSTINCREMENT_EXPR:
1070 /* All of these have side-effects, no matter what their
1071 operands are. */
1072 TREE_SIDE_EFFECTS (t) = 1;
1073 break;
1074
1075 default:
1076 break;
1077 }
1078 break;
1079
1080 case tcc_exceptional:
1081 switch (code)
1082 {
1083 case TARGET_OPTION_NODE:
1084 TREE_TARGET_OPTION(t)
1085 = ggc_cleared_alloc<struct cl_target_option> ();
1086 break;
1087
1088 case OPTIMIZATION_NODE:
1089 TREE_OPTIMIZATION (t)
1090 = ggc_cleared_alloc<struct cl_optimization> ();
1091 break;
1092
1093 default:
1094 break;
1095 }
1096 break;
1097
1098 default:
1099 /* Other classes need no special treatment. */
1100 break;
1101 }
1102
1103 return t;
1104 }
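
/* Usage sketch for make_node (hypothetical helper, for exposition only):
   a freshly built type node comes back with the defaults installed above,
   i.e. it is its own main variant and canonical type and has no alias set
   computed yet.  */

static tree ATTRIBUTE_UNUSED
example_make_raw_type (void)
{
  tree t = make_node (INTEGER_TYPE);
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t
		       && TYPE_CANONICAL (t) == t
		       && TYPE_ALIAS_SET (t) == -1);
  return t;
}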
1105
1106 /* Free tree node. */
1107
1108 void
1109 free_node (tree node)
1110 {
1111 enum tree_code code = TREE_CODE (node);
1112 if (GATHER_STATISTICS)
1113 {
1114 tree_code_counts[(int) TREE_CODE (node)]--;
1115 tree_node_counts[(int) t_kind]--;
1116 tree_node_sizes[(int) t_kind] -= tree_size (node);
1117 }
1118 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1119 vec_free (CONSTRUCTOR_ELTS (node));
1120 else if (code == BLOCK)
1121 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1122 else if (code == TREE_BINFO)
1123 vec_free (BINFO_BASE_ACCESSES (node));
1124 ggc_free (node);
1125 }
1126 \f
1127 /* Return a new node with the same contents as NODE except that its
1128 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1129
1130 tree
1131 copy_node_stat (tree node MEM_STAT_DECL)
1132 {
1133 tree t;
1134 enum tree_code code = TREE_CODE (node);
1135 size_t length;
1136
1137 gcc_assert (code != STATEMENT_LIST);
1138
1139 length = tree_size (node);
1140 record_node_allocation_statistics (code, length);
1141 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1142 memcpy (t, node, length);
1143
1144 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1145 TREE_CHAIN (t) = 0;
1146 TREE_ASM_WRITTEN (t) = 0;
1147 TREE_VISITED (t) = 0;
1148
1149 if (TREE_CODE_CLASS (code) == tcc_declaration)
1150 {
1151 if (code == DEBUG_EXPR_DECL)
1152 DECL_UID (t) = --next_debug_decl_uid;
1153 else
1154 {
1155 DECL_UID (t) = allocate_decl_uid ();
1156 if (DECL_PT_UID_SET_P (node))
1157 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1158 }
1159 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1160 && DECL_HAS_VALUE_EXPR_P (node))
1161 {
1162 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1163 DECL_HAS_VALUE_EXPR_P (t) = 1;
1164 }
1165 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1166 if (TREE_CODE (node) == VAR_DECL)
1167 {
1168 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1169 t->decl_with_vis.symtab_node = NULL;
1170 }
1171 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1172 {
1173 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1174 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1175 }
1176 if (TREE_CODE (node) == FUNCTION_DECL)
1177 {
1178 DECL_STRUCT_FUNCTION (t) = NULL;
1179 t->decl_with_vis.symtab_node = NULL;
1180 }
1181 }
1182 else if (TREE_CODE_CLASS (code) == tcc_type)
1183 {
1184 TYPE_UID (t) = next_type_uid++;
1185 /* The following is so that the debug code for
1186 the copy is different from the original type.
1187 The two statements usually duplicate each other
1188 (because they clear fields of the same union),
1189 but the optimizer should catch that. */
1190 TYPE_SYMTAB_POINTER (t) = 0;
1191 TYPE_SYMTAB_ADDRESS (t) = 0;
1192
1193 /* Do not copy the values cache. */
1194 if (TYPE_CACHED_VALUES_P (t))
1195 {
1196 TYPE_CACHED_VALUES_P (t) = 0;
1197 TYPE_CACHED_VALUES (t) = NULL_TREE;
1198 }
1199 }
1200 else if (code == TARGET_OPTION_NODE)
1201 {
1202 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1203 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1204 sizeof (struct cl_target_option));
1205 }
1206 else if (code == OPTIMIZATION_NODE)
1207 {
1208 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1209 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1210 sizeof (struct cl_optimization));
1211 }
1212
1213 return t;
1214 }
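
/* Usage sketch for copy_node (hypothetical helper, for exposition only):
   the copy is bitwise identical except for the bookkeeping reset above,
   notably a fresh DECL_UID and a cleared TREE_CHAIN.  */

static tree ATTRIBUTE_UNUSED
example_clone_decl (tree decl)
{
  tree copy = copy_node (decl);
  gcc_checking_assert (DECL_UID (copy) != DECL_UID (decl)
		       && TREE_CHAIN (copy) == NULL_TREE);
  return copy;
}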
1215
1216 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1217 For example, this can copy a list made of TREE_LIST nodes. */
1218
1219 tree
1220 copy_list (tree list)
1221 {
1222 tree head;
1223 tree prev, next;
1224
1225 if (list == 0)
1226 return 0;
1227
1228 head = prev = copy_node (list);
1229 next = TREE_CHAIN (list);
1230 while (next)
1231 {
1232 TREE_CHAIN (prev) = copy_node (next);
1233 prev = TREE_CHAIN (prev);
1234 next = TREE_CHAIN (next);
1235 }
1236 return head;
1237 }
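
/* Usage sketch for copy_list (hypothetical helper, for exposition only):
   duplicate the cells of a TREE_LIST chain; the copy shares the
   TREE_PURPOSE/TREE_VALUE operands but can be rechained freely.  */

static tree ATTRIBUTE_UNUSED
example_duplicate_list (tree a, tree b)
{
  tree orig = tree_cons (NULL_TREE, a, tree_cons (NULL_TREE, b, NULL_TREE));
  tree copy = copy_list (orig);
  gcc_checking_assert (copy != orig && TREE_VALUE (copy) == a);
  return copy;
}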
1238
1239 \f
1240 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1241 INTEGER_CST with value CST and type TYPE. */
1242
1243 static unsigned int
1244 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1245 {
1246 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1247 /* We need extra HWIs if CST is an unsigned integer with its
1248 upper bit set. */
1249 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1250 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1251 return cst.get_len ();
1252 }
1253
1254 /* Return a new INTEGER_CST with value CST and type TYPE. */
1255
1256 static tree
1257 build_new_int_cst (tree type, const wide_int &cst)
1258 {
1259 unsigned int len = cst.get_len ();
1260 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1261 tree nt = make_int_cst (len, ext_len);
1262
1263 if (len < ext_len)
1264 {
1265 --ext_len;
1266 TREE_INT_CST_ELT (nt, ext_len)
1267 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1268 for (unsigned int i = len; i < ext_len; ++i)
1269 TREE_INT_CST_ELT (nt, i) = -1;
1270 }
1271 else if (TYPE_UNSIGNED (type)
1272 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1273 {
1274 len--;
1275 TREE_INT_CST_ELT (nt, len)
1276 = zext_hwi (cst.elt (len),
1277 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1278 }
1279
1280 for (unsigned int i = 0; i < len; i++)
1281 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1282 TREE_TYPE (nt) = type;
1283 return nt;
1284 }
1285
1286 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1287
1288 tree
1289 build_int_cst (tree type, HOST_WIDE_INT low)
1290 {
1291 /* Support legacy code. */
1292 if (!type)
1293 type = integer_type_node;
1294
1295 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1296 }
1297
1298 tree
1299 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1300 {
1301 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1302 }
1303
1304 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1305
1306 tree
1307 build_int_cst_type (tree type, HOST_WIDE_INT low)
1308 {
1309 gcc_assert (type);
1310 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1311 }
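
/* Usage sketch for the build_int_cst family (hypothetical helper): LOW is
   sign extended by build_int_cst and build_int_cst_type, and zero extended
   by build_int_cstu, then forced to the range of TYPE.  */

static tree ATTRIBUTE_UNUSED
example_all_ones_uchar (void)
{
  /* For the 8-bit unsigned char type this yields the constant 255.  */
  return build_int_cst (unsigned_char_type_node, -1);
}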
1312
1313 /* Construct a tree of type TYPE with the value given by CST.  The
1314 signedness of CST is assumed to be the same as the signedness of TYPE.  */
1315
1316 tree
1317 double_int_to_tree (tree type, double_int cst)
1318 {
1319 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1320 }
1321
1322 /* We force the wide_int CST to the range of the type TYPE by sign or
1323 zero extending it.  OVERFLOWABLE indicates if we are interested in
1324 overflow of the value: when >0 we are only interested in signed
1325 overflow, for <0 we are interested in any overflow.  OVERFLOWED
1326 indicates whether overflow has already occurred.
1327 We force CST's value to be within the range of TYPE (by setting to
1328 0 or 1 all the bits outside the type's range).
1329 We set TREE_OVERFLOW on the returned node if
1330 OVERFLOWED is nonzero,
1331 or OVERFLOWABLE is >0 and signed overflow occurs,
1332 or OVERFLOWABLE is <0 and any overflow occurs.
1333 We return a new tree node for the extended wide_int.  The node
1334 is shared if no overflow flags are set.  */
1335
1336
1337 tree
1338 force_fit_type (tree type, const wide_int_ref &cst,
1339 int overflowable, bool overflowed)
1340 {
1341 signop sign = TYPE_SIGN (type);
1342
1343 /* If we need to set overflow flags, return a new unshared node. */
1344 if (overflowed || !wi::fits_to_tree_p (cst, type))
1345 {
1346 if (overflowed
1347 || overflowable < 0
1348 || (overflowable > 0 && sign == SIGNED))
1349 {
1350 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1351 tree t = build_new_int_cst (type, tmp);
1352 TREE_OVERFLOW (t) = 1;
1353 return t;
1354 }
1355 }
1356
1357 /* Else build a shared node. */
1358 return wide_int_to_tree (type, cst);
1359 }
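
/* Usage sketch for force_fit_type (hypothetical helper, for exposition
   only): a caller that has already detected overflow, e.g. while constant
   folding, passes OVERFLOWED == true and always gets back an unshared
   INTEGER_CST with TREE_OVERFLOW set.  */

static tree ATTRIBUTE_UNUSED
example_flag_overflowed_cst (tree type, const wide_int &val)
{
  return force_fit_type (type, val, 0, true);
}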
1360
1361 /* These are the hash table functions for the hash table of shared
1362 INTEGER_CST nodes.  */
1363
1364 /* Return the hash code for X, an INTEGER_CST.  */
1365
1366 hashval_t
1367 int_cst_hasher::hash (tree x)
1368 {
1369 const_tree const t = x;
1370 hashval_t code = TYPE_UID (TREE_TYPE (t));
1371 int i;
1372
1373 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1374 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1375
1376 return code;
1377 }
1378
1379 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1380 is the same as that given by Y, also an INTEGER_CST.  */
1381
1382 bool
1383 int_cst_hasher::equal (tree x, tree y)
1384 {
1385 const_tree const xt = x;
1386 const_tree const yt = y;
1387
1388 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1389 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1390 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1391 return false;
1392
1393 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1394 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1395 return false;
1396
1397 return true;
1398 }
1399
1400 /* Create an INT_CST node of TYPE and value CST.
1401 The returned node is always shared. For small integers we use a
1402 per-type vector cache, for larger ones we use a single hash table.
1403 The value is extended from its precision according to the sign of
1404 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1405 the upper bits and ensures that hashing and value equality based
1406 upon the underlying HOST_WIDE_INTs works without masking. */
1407
1408 tree
1409 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1410 {
1411 tree t;
1412 int ix = -1;
1413 int limit = 0;
1414
1415 gcc_assert (type);
1416 unsigned int prec = TYPE_PRECISION (type);
1417 signop sgn = TYPE_SIGN (type);
1418
1419 /* Verify that everything is canonical. */
1420 int l = pcst.get_len ();
1421 if (l > 1)
1422 {
1423 if (pcst.elt (l - 1) == 0)
1424 gcc_checking_assert (pcst.elt (l - 2) < 0);
1425 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1426 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1427 }
1428
1429 wide_int cst = wide_int::from (pcst, prec, sgn);
1430 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1431
1432 if (ext_len == 1)
1433 {
1434 /* We just need to store a single HOST_WIDE_INT. */
1435 HOST_WIDE_INT hwi;
1436 if (TYPE_UNSIGNED (type))
1437 hwi = cst.to_uhwi ();
1438 else
1439 hwi = cst.to_shwi ();
1440
1441 switch (TREE_CODE (type))
1442 {
1443 case NULLPTR_TYPE:
1444 gcc_assert (hwi == 0);
1445 /* Fallthru. */
1446
1447 case POINTER_TYPE:
1448 case REFERENCE_TYPE:
1449 case POINTER_BOUNDS_TYPE:
1450 /* Cache NULL pointer and zero bounds. */
1451 if (hwi == 0)
1452 {
1453 limit = 1;
1454 ix = 0;
1455 }
1456 break;
1457
1458 case BOOLEAN_TYPE:
1459 /* Cache false or true. */
1460 limit = 2;
1461 if (IN_RANGE (hwi, 0, 1))
1462 ix = hwi;
1463 break;
1464
1465 case INTEGER_TYPE:
1466 case OFFSET_TYPE:
1467 if (TYPE_SIGN (type) == UNSIGNED)
1468 {
1469 /* Cache [0, N). */
1470 limit = INTEGER_SHARE_LIMIT;
1471 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1472 ix = hwi;
1473 }
1474 else
1475 {
1476 /* Cache [-1, N). */
1477 limit = INTEGER_SHARE_LIMIT + 1;
1478 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1479 ix = hwi + 1;
1480 }
1481 break;
1482
1483 case ENUMERAL_TYPE:
1484 break;
1485
1486 default:
1487 gcc_unreachable ();
1488 }
1489
1490 if (ix >= 0)
1491 {
1492 /* Look for it in the type's vector of small shared ints. */
1493 if (!TYPE_CACHED_VALUES_P (type))
1494 {
1495 TYPE_CACHED_VALUES_P (type) = 1;
1496 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1497 }
1498
1499 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1500 if (t)
1501 /* Make sure no one is clobbering the shared constant. */
1502 gcc_checking_assert (TREE_TYPE (t) == type
1503 && TREE_INT_CST_NUNITS (t) == 1
1504 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1505 && TREE_INT_CST_EXT_NUNITS (t) == 1
1506 && TREE_INT_CST_ELT (t, 0) == hwi);
1507 else
1508 {
1509 /* Create a new shared int. */
1510 t = build_new_int_cst (type, cst);
1511 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1512 }
1513 }
1514 else
1515 {
1516 /* Use the cache of larger shared ints, using int_cst_node as
1517 a temporary. */
1518
1519 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1520 TREE_TYPE (int_cst_node) = type;
1521
1522 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1523 t = *slot;
1524 if (!t)
1525 {
1526 /* Insert this one into the hash table. */
1527 t = int_cst_node;
1528 *slot = t;
1529 /* Make a new node for next time round. */
1530 int_cst_node = make_int_cst (1, 1);
1531 }
1532 }
1533 }
1534 else
1535 {
1536 /* The value either hashes properly or we drop it on the floor
1537 for the gc to take care of. There will not be enough of them
1538 to worry about. */
1539
1540 tree nt = build_new_int_cst (type, cst);
1541 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1542 t = *slot;
1543 if (!t)
1544 {
1545 /* Insert this one into the hash table. */
1546 t = nt;
1547 *slot = t;
1548 }
1549 }
1550
1551 return t;
1552 }
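
/* Usage sketch of the sharing scheme above (hypothetical helper): small
   values come from the per-type TYPE_CACHED_VALUES vector, so repeated
   requests for the same constant return the very same node.  */

static void ATTRIBUTE_UNUSED
example_shared_small_int (void)
{
  tree a = build_int_cst (integer_type_node, 1);
  tree b = build_int_cst (integer_type_node, 1);
  /* 1 is below INTEGER_SHARE_LIMIT, hence cached and pointer-identical.  */
  gcc_checking_assert (a == b);
}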
1553
1554 void
1555 cache_integer_cst (tree t)
1556 {
1557 tree type = TREE_TYPE (t);
1558 int ix = -1;
1559 int limit = 0;
1560 int prec = TYPE_PRECISION (type);
1561
1562 gcc_assert (!TREE_OVERFLOW (t));
1563
1564 switch (TREE_CODE (type))
1565 {
1566 case NULLPTR_TYPE:
1567 gcc_assert (integer_zerop (t));
1568 /* Fallthru. */
1569
1570 case POINTER_TYPE:
1571 case REFERENCE_TYPE:
1572 /* Cache NULL pointer. */
1573 if (integer_zerop (t))
1574 {
1575 limit = 1;
1576 ix = 0;
1577 }
1578 break;
1579
1580 case BOOLEAN_TYPE:
1581 /* Cache false or true. */
1582 limit = 2;
1583 if (wi::ltu_p (t, 2))
1584 ix = TREE_INT_CST_ELT (t, 0);
1585 break;
1586
1587 case INTEGER_TYPE:
1588 case OFFSET_TYPE:
1589 if (TYPE_UNSIGNED (type))
1590 {
1591 /* Cache 0..N */
1592 limit = INTEGER_SHARE_LIMIT;
1593
1594 /* This is a little hokey, but if the prec is smaller than
1595 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1596 obvious test will not get the correct answer. */
1597 if (prec < HOST_BITS_PER_WIDE_INT)
1598 {
1599 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1600 ix = tree_to_uhwi (t);
1601 }
1602 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1603 ix = tree_to_uhwi (t);
1604 }
1605 else
1606 {
1607 /* Cache -1..N */
1608 limit = INTEGER_SHARE_LIMIT + 1;
1609
1610 if (integer_minus_onep (t))
1611 ix = 0;
1612 else if (!wi::neg_p (t))
1613 {
1614 if (prec < HOST_BITS_PER_WIDE_INT)
1615 {
1616 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1617 ix = tree_to_shwi (t) + 1;
1618 }
1619 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1620 ix = tree_to_shwi (t) + 1;
1621 }
1622 }
1623 break;
1624
1625 case ENUMERAL_TYPE:
1626 break;
1627
1628 default:
1629 gcc_unreachable ();
1630 }
1631
1632 if (ix >= 0)
1633 {
1634 /* Look for it in the type's vector of small shared ints. */
1635 if (!TYPE_CACHED_VALUES_P (type))
1636 {
1637 TYPE_CACHED_VALUES_P (type) = 1;
1638 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1639 }
1640
1641 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1642 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1643 }
1644 else
1645 {
1646 /* Use the cache of larger shared ints. */
1647 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1648 /* If there is already an entry for the number verify it's the
1649 same. */
1650 if (*slot)
1651 gcc_assert (wi::eq_p (tree (*slot), t));
1652 else
1653 /* Otherwise insert this one into the hash table. */
1654 *slot = t;
1655 }
1656 }
1657
1658
1659 /* Build an integer constant in TYPE such that the lowest BITS bits are
1660 ones and the rest are zeros.  */
1661
1662 tree
1663 build_low_bits_mask (tree type, unsigned bits)
1664 {
1665 gcc_assert (bits <= TYPE_PRECISION (type));
1666
1667 return wide_int_to_tree (type, wi::mask (bits, false,
1668 TYPE_PRECISION (type)));
1669 }
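
/* Usage sketch for build_low_bits_mask (hypothetical helper): a mask of
   the low four bits of an unsigned int is simply the constant 0xf.  */

static tree ATTRIBUTE_UNUSED
example_nibble_mask (void)
{
  tree mask = build_low_bits_mask (unsigned_type_node, 4);
  gcc_checking_assert (tree_to_uhwi (mask) == 0xf);
  return mask;
}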
1670
1671 /* Check that X is an integer constant that can be expressed in (unsigned)
1672 HOST_WIDE_INT without loss of precision.  */
1673
1674 bool
1675 cst_and_fits_in_hwi (const_tree x)
1676 {
1677 return (TREE_CODE (x) == INTEGER_CST
1678 && TYPE_PRECISION (TREE_TYPE (x)) <= HOST_BITS_PER_WIDE_INT);
1679 }
1680
1681 /* Build a newly constructed VECTOR_CST node of length LEN. */
1682
1683 tree
1684 make_vector_stat (unsigned len MEM_STAT_DECL)
1685 {
1686 tree t;
1687 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1688
1689 record_node_allocation_statistics (VECTOR_CST, length);
1690
1691 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1692
1693 TREE_SET_CODE (t, VECTOR_CST);
1694 TREE_CONSTANT (t) = 1;
1695
1696 return t;
1697 }
1698
1699 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1700 are in the array pointed to by VALS.  */
1701
1702 tree
1703 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1704 {
1705 int over = 0;
1706 unsigned cnt = 0;
1707 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1708 TREE_TYPE (v) = type;
1709
1710 /* Iterate through elements and check for overflow. */
1711 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1712 {
1713 tree value = vals[cnt];
1714
1715 VECTOR_CST_ELT (v, cnt) = value;
1716
1717 /* Don't crash if we get an address constant. */
1718 if (!CONSTANT_CLASS_P (value))
1719 continue;
1720
1721 over |= TREE_OVERFLOW (value);
1722 }
1723
1724 TREE_OVERFLOW (v) = over;
1725 return v;
1726 }
1727
1728 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1729 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1730
1731 tree
1732 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1733 {
1734 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1735 unsigned HOST_WIDE_INT idx, pos = 0;
1736 tree value;
1737
1738 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1739 {
1740 if (TREE_CODE (value) == VECTOR_CST)
1741 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1742 vec[pos++] = VECTOR_CST_ELT (value, i);
1743 else
1744 vec[pos++] = value;
1745 }
1746 while (pos < TYPE_VECTOR_SUBPARTS (type))
1747 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1748
1749 return build_vector (type, vec);
1750 }
1751
1752 /* Build a vector of type VECTYPE where all the elements are SCs. */
1753 tree
1754 build_vector_from_val (tree vectype, tree sc)
1755 {
1756 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1757
1758 if (sc == error_mark_node)
1759 return sc;
1760
1761 /* Verify that the vector type is suitable for SC. Note that there
1762 is some inconsistency in the type-system with respect to restrict
1763 qualifications of pointers. Vector types always have a main-variant
1764 element type and the qualification is applied to the vector-type.
1765 So TREE_TYPE (vector-type) does not return a properly qualified
1766 vector element-type. */
1767 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1768 TREE_TYPE (vectype)));
1769
1770 if (CONSTANT_CLASS_P (sc))
1771 {
1772 tree *v = XALLOCAVEC (tree, nunits);
1773 for (i = 0; i < nunits; ++i)
1774 v[i] = sc;
1775 return build_vector (vectype, v);
1776 }
1777 else
1778 {
1779 vec<constructor_elt, va_gc> *v;
1780 vec_alloc (v, nunits);
1781 for (i = 0; i < nunits; ++i)
1782 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1783 return build_constructor (vectype, v);
1784 }
1785 }
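
/* Usage sketch for build_vector_from_val (hypothetical helper): splat a
   scalar constant into a four-element integer vector.  Since SC is a
   constant this produces a VECTOR_CST rather than a CONSTRUCTOR.
   build_vector_type is defined later in this file.  */

static tree ATTRIBUTE_UNUSED
example_splat_vector (void)
{
  tree vectype = build_vector_type (integer_type_node, 4);
  return build_vector_from_val (vectype,
				build_int_cst (integer_type_node, 7));
}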
1786
1787 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1788 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1789
1790 void
1791 recompute_constructor_flags (tree c)
1792 {
1793 unsigned int i;
1794 tree val;
1795 bool constant_p = true;
1796 bool side_effects_p = false;
1797 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1798
1799 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1800 {
1801 /* Mostly ctors will have elts that don't have side-effects, so
1802 the usual case is to scan all the elements. Hence a single
1803 loop for both const and side effects, rather than one loop
1804 each (with early outs). */
1805 if (!TREE_CONSTANT (val))
1806 constant_p = false;
1807 if (TREE_SIDE_EFFECTS (val))
1808 side_effects_p = true;
1809 }
1810
1811 TREE_SIDE_EFFECTS (c) = side_effects_p;
1812 TREE_CONSTANT (c) = constant_p;
1813 }
1814
1815 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1816 CONSTRUCTOR C. */
1817
1818 void
1819 verify_constructor_flags (tree c)
1820 {
1821 unsigned int i;
1822 tree val;
1823 bool constant_p = TREE_CONSTANT (c);
1824 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1825 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1826
1827 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1828 {
1829 if (constant_p && !TREE_CONSTANT (val))
1830 internal_error ("non-constant element in constant CONSTRUCTOR");
1831 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1832 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1833 }
1834 }
1835
1836 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1837 are in the vec pointed to by VALS. */
1838 tree
1839 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1840 {
1841 tree c = make_node (CONSTRUCTOR);
1842
1843 TREE_TYPE (c) = type;
1844 CONSTRUCTOR_ELTS (c) = vals;
1845
1846 recompute_constructor_flags (c);
1847
1848 return c;
1849 }
1850
1851 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1852 INDEX and VALUE. */
1853 tree
1854 build_constructor_single (tree type, tree index, tree value)
1855 {
1856 vec<constructor_elt, va_gc> *v;
1857 constructor_elt elt = {index, value};
1858
1859 vec_alloc (v, 1);
1860 v->quick_push (elt);
1861
1862 return build_constructor (type, v);
1863 }
1864
1865
1866 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1867 are in a list pointed to by VALS. */
1868 tree
1869 build_constructor_from_list (tree type, tree vals)
1870 {
1871 tree t;
1872 vec<constructor_elt, va_gc> *v = NULL;
1873
1874 if (vals)
1875 {
1876 vec_alloc (v, list_length (vals));
1877 for (t = vals; t; t = TREE_CHAIN (t))
1878 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1879 }
1880
1881 return build_constructor (type, v);
1882 }
1883
1884 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1885 of elements, provided as index/value pairs. */
1886
1887 tree
1888 build_constructor_va (tree type, int nelts, ...)
1889 {
1890 vec<constructor_elt, va_gc> *v = NULL;
1891 va_list p;
1892
1893 va_start (p, nelts);
1894 vec_alloc (v, nelts);
1895 while (nelts--)
1896 {
1897 tree index = va_arg (p, tree);
1898 tree value = va_arg (p, tree);
1899 CONSTRUCTOR_APPEND_ELT (v, index, value);
1900 }
1901 va_end (p);
1902 return build_constructor (type, v);
1903 }
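
/* Usage sketch for build_constructor_va (hypothetical helper): the
   aggregate initializer {[0] = ELT0, [1] = ELT1} for an array type,
   written as explicit index/value pairs.  */

static tree ATTRIBUTE_UNUSED
example_array_ctor (tree array_type, tree elt0, tree elt1)
{
  return build_constructor_va (array_type, 2,
			       size_int (0), elt0,
			       size_int (1), elt1);
}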
1904
1905 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1906
1907 tree
1908 build_fixed (tree type, FIXED_VALUE_TYPE f)
1909 {
1910 tree v;
1911 FIXED_VALUE_TYPE *fp;
1912
1913 v = make_node (FIXED_CST);
1914 fp = ggc_alloc<fixed_value> ();
1915 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1916
1917 TREE_TYPE (v) = type;
1918 TREE_FIXED_CST_PTR (v) = fp;
1919 return v;
1920 }
1921
1922 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1923
1924 tree
1925 build_real (tree type, REAL_VALUE_TYPE d)
1926 {
1927 tree v;
1928 REAL_VALUE_TYPE *dp;
1929 int overflow = 0;
1930
1931 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1932 Consider doing it via real_convert now. */
1933
1934 v = make_node (REAL_CST);
1935 dp = ggc_alloc<real_value> ();
1936 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1937
1938 TREE_TYPE (v) = type;
1939 TREE_REAL_CST_PTR (v) = dp;
1940 TREE_OVERFLOW (v) = overflow;
1941 return v;
1942 }
1943
1944 /* Like build_real, but first truncate D to the type. */
1945
1946 tree
1947 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1948 {
1949 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1950 }
1951
1952 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1953 node I, converted for the mode of TYPE (or VOIDmode if TYPE is null).  */
1954
1955 REAL_VALUE_TYPE
1956 real_value_from_int_cst (const_tree type, const_tree i)
1957 {
1958 REAL_VALUE_TYPE d;
1959
1960 /* Clear all bits of the real value type so that we can later do
1961 bitwise comparisons to see if two values are the same. */
1962 memset (&d, 0, sizeof d);
1963
1964 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1965 TYPE_SIGN (TREE_TYPE (i)));
1966 return d;
1967 }
1968
1969 /* Given a tree representing an integer constant I, return a tree
1970 representing the same value as a floating-point constant of type TYPE. */
1971
1972 tree
1973 build_real_from_int_cst (tree type, const_tree i)
1974 {
1975 tree v;
1976 int overflow = TREE_OVERFLOW (i);
1977
1978 v = build_real (type, real_value_from_int_cst (type, i));
1979
1980 TREE_OVERFLOW (v) |= overflow;
1981 return v;
1982 }
1983
1984 /* Return a newly constructed STRING_CST node whose value is
1985 the LEN characters at STR.
1986 Note that for a C string literal, LEN should include the trailing NUL.
1987 The TREE_TYPE is not initialized. */
1988
1989 tree
1990 build_string (int len, const char *str)
1991 {
1992 tree s;
1993 size_t length;
1994
1995 /* Do not waste bytes provided by padding of struct tree_string. */
1996 length = len + offsetof (struct tree_string, str) + 1;
1997
1998 record_node_allocation_statistics (STRING_CST, length);
1999
2000 s = (tree) ggc_internal_alloc (length);
2001
2002 memset (s, 0, sizeof (struct tree_typed));
2003 TREE_SET_CODE (s, STRING_CST);
2004 TREE_CONSTANT (s) = 1;
2005 TREE_STRING_LENGTH (s) = len;
2006 memcpy (s->string.str, str, len);
2007 s->string.str[len] = '\0';
2008
2009 return s;
2010 }
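
/* Usage sketch for build_string (hypothetical helper): the length passed
   for a C string literal counts the trailing NUL, and the caller is still
   responsible for filling in TREE_TYPE.  */

static tree ATTRIBUTE_UNUSED
example_string_cst (void)
{
  tree s = build_string (4, "abc");	/* "abc" plus its trailing NUL.  */
  gcc_checking_assert (TREE_STRING_LENGTH (s) == 4);
  return s;
}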
2011
2012 /* Return a newly constructed COMPLEX_CST node whose value is
2013 specified by the real and imaginary parts REAL and IMAG.
2014 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2015 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2016
2017 tree
2018 build_complex (tree type, tree real, tree imag)
2019 {
2020 tree t = make_node (COMPLEX_CST);
2021
2022 TREE_REALPART (t) = real;
2023 TREE_IMAGPART (t) = imag;
2024 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2025 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2026 return t;
2027 }
2028
2029 /* Build a complex (inf +- 0i), such as for the result of cproj.
2030 TYPE is the complex tree type of the result. If NEG is true, the
2031 imaginary zero is negative. */
2032
2033 tree
2034 build_complex_inf (tree type, bool neg)
2035 {
2036 REAL_VALUE_TYPE rinf, rzero = dconst0;
2037
2038 real_inf (&rinf);
2039 rzero.sign = neg;
2040 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2041 build_real (TREE_TYPE (type), rzero));
2042 }
2043
2044 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2045 element is set to 1. In particular, this is 1 + i for complex types. */
2046
2047 tree
2048 build_each_one_cst (tree type)
2049 {
2050 if (TREE_CODE (type) == COMPLEX_TYPE)
2051 {
2052 tree scalar = build_one_cst (TREE_TYPE (type));
2053 return build_complex (type, scalar, scalar);
2054 }
2055 else
2056 return build_one_cst (type);
2057 }
2058
2059 /* Return a constant of arithmetic type TYPE which is the
2060 multiplicative identity of the set TYPE. */
2061
2062 tree
2063 build_one_cst (tree type)
2064 {
2065 switch (TREE_CODE (type))
2066 {
2067 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2068 case POINTER_TYPE: case REFERENCE_TYPE:
2069 case OFFSET_TYPE:
2070 return build_int_cst (type, 1);
2071
2072 case REAL_TYPE:
2073 return build_real (type, dconst1);
2074
2075 case FIXED_POINT_TYPE:
2076 /* We can only generate 1 for accum types. */
2077 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2078 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_one_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 return build_complex (type,
2089 build_one_cst (TREE_TYPE (type)),
2090 build_zero_cst (TREE_TYPE (type)));
2091
2092 default:
2093 gcc_unreachable ();
2094 }
2095 }
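/* Illustrative note: the two helpers above differ only for complex
   types.  With C a complex type such as complex_double_type_node,

     build_one_cst (c)       ==>  1.0 + 0.0i  (multiplicative identity)
     build_each_one_cst (c)  ==>  1.0 + 1.0i  (every element set to 1)

   For scalar and vector types both return the same constant.  */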
2096
2097 /* Return an integer of type TYPE containing all 1's in as much precision as
2098 it contains, or a complex or vector whose subparts are such integers. */
2099
2100 tree
2101 build_all_ones_cst (tree type)
2102 {
2103 if (TREE_CODE (type) == COMPLEX_TYPE)
2104 {
2105 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2106 return build_complex (type, scalar, scalar);
2107 }
2108 else
2109 return build_minus_one_cst (type);
2110 }
2111
2112 /* Return a constant of arithmetic type TYPE which is the
2113 opposite of the multiplicative identity of the set TYPE. */
2114
2115 tree
2116 build_minus_one_cst (tree type)
2117 {
2118 switch (TREE_CODE (type))
2119 {
2120 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2121 case POINTER_TYPE: case REFERENCE_TYPE:
2122 case OFFSET_TYPE:
2123 return build_int_cst (type, -1);
2124
2125 case REAL_TYPE:
2126 return build_real (type, dconstm1);
2127
2128 case FIXED_POINT_TYPE:
2129 /* We can only generate -1 for accum types. */
2130 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2131 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2132 TYPE_MODE (type)));
2133
2134 case VECTOR_TYPE:
2135 {
2136 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2137
2138 return build_vector_from_val (type, scalar);
2139 }
2140
2141 case COMPLEX_TYPE:
2142 return build_complex (type,
2143 build_minus_one_cst (TREE_TYPE (type)),
2144 build_zero_cst (TREE_TYPE (type)));
2145
2146 default:
2147 gcc_unreachable ();
2148 }
2149 }
2150
2151 /* Build 0 constant of type TYPE. This is used by constructor folding
2152 and thus the constant should be represented in memory by
2153 zero(es). */
2154
2155 tree
2156 build_zero_cst (tree type)
2157 {
2158 switch (TREE_CODE (type))
2159 {
2160 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2161 case POINTER_TYPE: case REFERENCE_TYPE:
2162 case OFFSET_TYPE: case NULLPTR_TYPE:
2163 return build_int_cst (type, 0);
2164
2165 case REAL_TYPE:
2166 return build_real (type, dconst0);
2167
2168 case FIXED_POINT_TYPE:
2169 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2170
2171 case VECTOR_TYPE:
2172 {
2173 tree scalar = build_zero_cst (TREE_TYPE (type));
2174
2175 return build_vector_from_val (type, scalar);
2176 }
2177
2178 case COMPLEX_TYPE:
2179 {
2180 tree zero = build_zero_cst (TREE_TYPE (type));
2181
2182 return build_complex (type, zero, zero);
2183 }
2184
2185 default:
2186 if (!AGGREGATE_TYPE_P (type))
2187 return fold_convert (type, integer_zero_node);
2188 return build_constructor (type, NULL);
2189 }
2190 }
2191
2192
2193 /* Build a BINFO with LEN language slots. */
2194
2195 tree
2196 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2197 {
2198 tree t;
2199 size_t length = (offsetof (struct tree_binfo, base_binfos)
2200 + vec<tree, va_gc>::embedded_size (base_binfos));
2201
2202 record_node_allocation_statistics (TREE_BINFO, length);
2203
2204 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2205
2206 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2207
2208 TREE_SET_CODE (t, TREE_BINFO);
2209
2210 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2211
2212 return t;
2213 }
2214
2215 /* Create a CASE_LABEL_EXPR tree node and return it. */
2216
2217 tree
2218 build_case_label (tree low_value, tree high_value, tree label_decl)
2219 {
2220 tree t = make_node (CASE_LABEL_EXPR);
2221
2222 TREE_TYPE (t) = void_type_node;
2223 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2224
2225 CASE_LOW (t) = low_value;
2226 CASE_HIGH (t) = high_value;
2227 CASE_LABEL (t) = label_decl;
2228 CASE_CHAIN (t) = NULL_TREE;
2229
2230 return t;
2231 }
2232
2233 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2234 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2235 The latter determines the length of the HOST_WIDE_INT vector. */
2236
2237 tree
2238 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2239 {
2240 tree t;
2241 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2242 + sizeof (struct tree_int_cst));
2243
2244 gcc_assert (len);
2245 record_node_allocation_statistics (INTEGER_CST, length);
2246
2247 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2248
2249 TREE_SET_CODE (t, INTEGER_CST);
2250 TREE_INT_CST_NUNITS (t) = len;
2251 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2252 /* to_offset can only be applied to trees that are offset_int-sized
2253 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2254 must be exactly the precision of offset_int and so LEN is correct. */
2255 if (ext_len <= OFFSET_INT_ELTS)
2256 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2257 else
2258 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2259
2260 TREE_CONSTANT (t) = 1;
2261
2262 return t;
2263 }
2264
2265 /* Build a newly constructed TREE_VEC node of length LEN. */
2266
2267 tree
2268 make_tree_vec_stat (int len MEM_STAT_DECL)
2269 {
2270 tree t;
2271 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2272
2273 record_node_allocation_statistics (TREE_VEC, length);
2274
2275 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2276
2277 TREE_SET_CODE (t, TREE_VEC);
2278 TREE_VEC_LENGTH (t) = len;
2279
2280 return t;
2281 }
2282
2283 /* Grow a TREE_VEC node to new length LEN. */
2284
2285 tree
2286 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2287 {
2288 gcc_assert (TREE_CODE (v) == TREE_VEC);
2289
2290 int oldlen = TREE_VEC_LENGTH (v);
2291 gcc_assert (len > oldlen);
2292
2293 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2294 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2295
2296 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2297
2298 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2299
2300 TREE_VEC_LENGTH (v) = len;
2301
2302 return v;
2303 }
2304 \f
2305 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2306 fixed, and scalar, complex or vector. */
2307
2308 int
2309 zerop (const_tree expr)
2310 {
2311 return (integer_zerop (expr)
2312 || real_zerop (expr)
2313 || fixed_zerop (expr));
2314 }
2315
2316 /* Return 1 if EXPR is the integer constant zero or a complex constant
2317 of zero. */
2318
2319 int
2320 integer_zerop (const_tree expr)
2321 {
2322 switch (TREE_CODE (expr))
2323 {
2324 case INTEGER_CST:
2325 return wi::eq_p (expr, 0);
2326 case COMPLEX_CST:
2327 return (integer_zerop (TREE_REALPART (expr))
2328 && integer_zerop (TREE_IMAGPART (expr)));
2329 case VECTOR_CST:
2330 {
2331 unsigned i;
2332 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2333 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2334 return false;
2335 return true;
2336 }
2337 default:
2338 return false;
2339 }
2340 }
2341
2342 /* Return 1 if EXPR is the integer constant one or the corresponding
2343 complex constant. */
2344
2345 int
2346 integer_onep (const_tree expr)
2347 {
2348 switch (TREE_CODE (expr))
2349 {
2350 case INTEGER_CST:
2351 return wi::eq_p (wi::to_widest (expr), 1);
2352 case COMPLEX_CST:
2353 return (integer_onep (TREE_REALPART (expr))
2354 && integer_zerop (TREE_IMAGPART (expr)));
2355 case VECTOR_CST:
2356 {
2357 unsigned i;
2358 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2359 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2360 return false;
2361 return true;
2362 }
2363 default:
2364 return false;
2365 }
2366 }
2367
2368 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2369 return 1 if every piece is the integer constant one. */
2370
2371 int
2372 integer_each_onep (const_tree expr)
2373 {
2374 if (TREE_CODE (expr) == COMPLEX_CST)
2375 return (integer_onep (TREE_REALPART (expr))
2376 && integer_onep (TREE_IMAGPART (expr)));
2377 else
2378 return integer_onep (expr);
2379 }
2380
2381 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2382 it contains, or a complex or vector whose subparts are such integers. */
2383
2384 int
2385 integer_all_onesp (const_tree expr)
2386 {
2387 if (TREE_CODE (expr) == COMPLEX_CST
2388 && integer_all_onesp (TREE_REALPART (expr))
2389 && integer_all_onesp (TREE_IMAGPART (expr)))
2390 return 1;
2391
2392 else if (TREE_CODE (expr) == VECTOR_CST)
2393 {
2394 unsigned i;
2395 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2396 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2397 return 0;
2398 return 1;
2399 }
2400
2401 else if (TREE_CODE (expr) != INTEGER_CST)
2402 return 0;
2403
2404 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2405 }
2406
2407 /* Return 1 if EXPR is the integer constant minus one. */
2408
2409 int
2410 integer_minus_onep (const_tree expr)
2411 {
2412 if (TREE_CODE (expr) == COMPLEX_CST)
2413 return (integer_all_onesp (TREE_REALPART (expr))
2414 && integer_zerop (TREE_IMAGPART (expr)));
2415 else
2416 return integer_all_onesp (expr);
2417 }
2418
2419 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2420 one bit on). */
2421
2422 int
2423 integer_pow2p (const_tree expr)
2424 {
2425 if (TREE_CODE (expr) == COMPLEX_CST
2426 && integer_pow2p (TREE_REALPART (expr))
2427 && integer_zerop (TREE_IMAGPART (expr)))
2428 return 1;
2429
2430 if (TREE_CODE (expr) != INTEGER_CST)
2431 return 0;
2432
2433 return wi::popcount (expr) == 1;
2434 }
2435
2436 /* Return 1 if EXPR is an integer constant other than zero or a
2437 complex constant other than zero. */
2438
2439 int
2440 integer_nonzerop (const_tree expr)
2441 {
2442 return ((TREE_CODE (expr) == INTEGER_CST
2443 && !wi::eq_p (expr, 0))
2444 || (TREE_CODE (expr) == COMPLEX_CST
2445 && (integer_nonzerop (TREE_REALPART (expr))
2446 || integer_nonzerop (TREE_IMAGPART (expr)))));
2447 }
2448
2449 /* Return 1 if EXPR is the integer constant one.  For vectors,
2450 return 1 if every element is the integer constant minus one
2451 (representing the value TRUE). */
2452
2453 int
2454 integer_truep (const_tree expr)
2455 {
2456 if (TREE_CODE (expr) == VECTOR_CST)
2457 return integer_all_onesp (expr);
2458 return integer_onep (expr);
2459 }
2460
2461 /* Return 1 if EXPR is the fixed-point constant zero. */
2462
2463 int
2464 fixed_zerop (const_tree expr)
2465 {
2466 return (TREE_CODE (expr) == FIXED_CST
2467 && TREE_FIXED_CST (expr).data.is_zero ());
2468 }
2469
2470 /* Return the power of two represented by a tree node known to be a
2471 power of two. */
2472
2473 int
2474 tree_log2 (const_tree expr)
2475 {
2476 if (TREE_CODE (expr) == COMPLEX_CST)
2477 return tree_log2 (TREE_REALPART (expr));
2478
2479 return wi::exact_log2 (expr);
2480 }
2481
2482 /* Similar, but return the largest integer Y such that 2 ** Y is less
2483 than or equal to EXPR. */
2484
2485 int
2486 tree_floor_log2 (const_tree expr)
2487 {
2488 if (TREE_CODE (expr) == COMPLEX_CST)
2489 return tree_log2 (TREE_REALPART (expr));
2490
2491 return wi::floor_log2 (expr);
2492 }
2493
2494 /* Return number of known trailing zero bits in EXPR, or, if the value of
2495 EXPR is known to be zero, the precision of it's type. */
2496
2497 unsigned int
2498 tree_ctz (const_tree expr)
2499 {
2500 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2501 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2502 return 0;
2503
2504 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2505 switch (TREE_CODE (expr))
2506 {
2507 case INTEGER_CST:
2508 ret1 = wi::ctz (expr);
2509 return MIN (ret1, prec);
2510 case SSA_NAME:
2511 ret1 = wi::ctz (get_nonzero_bits (expr));
2512 return MIN (ret1, prec);
2513 case PLUS_EXPR:
2514 case MINUS_EXPR:
2515 case BIT_IOR_EXPR:
2516 case BIT_XOR_EXPR:
2517 case MIN_EXPR:
2518 case MAX_EXPR:
2519 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2520 if (ret1 == 0)
2521 return ret1;
2522 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2523 return MIN (ret1, ret2);
2524 case POINTER_PLUS_EXPR:
2525 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2526 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2527 /* Second operand is sizetype, which could in theory be
2528 wider than the pointer's precision. Make sure we never
2529 return more than prec. */
2530 ret2 = MIN (ret2, prec);
2531 return MIN (ret1, ret2);
2532 case BIT_AND_EXPR:
2533 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2534 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2535 return MAX (ret1, ret2);
2536 case MULT_EXPR:
2537 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2538 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2539 return MIN (ret1 + ret2, prec);
2540 case LSHIFT_EXPR:
2541 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2542 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2543 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2544 {
2545 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2546 return MIN (ret1 + ret2, prec);
2547 }
2548 return ret1;
2549 case RSHIFT_EXPR:
2550 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2551 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2552 {
2553 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2554 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2555 if (ret1 > ret2)
2556 return ret1 - ret2;
2557 }
2558 return 0;
2559 case TRUNC_DIV_EXPR:
2560 case CEIL_DIV_EXPR:
2561 case FLOOR_DIV_EXPR:
2562 case ROUND_DIV_EXPR:
2563 case EXACT_DIV_EXPR:
2564 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2565 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2566 {
2567 int l = tree_log2 (TREE_OPERAND (expr, 1));
2568 if (l >= 0)
2569 {
2570 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2571 ret2 = l;
2572 if (ret1 > ret2)
2573 return ret1 - ret2;
2574 }
2575 }
2576 return 0;
2577 CASE_CONVERT:
2578 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2579 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2580 ret1 = prec;
2581 return MIN (ret1, prec);
2582 case SAVE_EXPR:
2583 return tree_ctz (TREE_OPERAND (expr, 0));
2584 case COND_EXPR:
2585 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2586 if (ret1 == 0)
2587 return 0;
2588 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2589 return MIN (ret1, ret2);
2590 case COMPOUND_EXPR:
2591 return tree_ctz (TREE_OPERAND (expr, 1));
2592 case ADDR_EXPR:
2593 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2594 if (ret1 > BITS_PER_UNIT)
2595 {
2596 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2597 return MIN (ret1, prec);
2598 }
2599 return 0;
2600 default:
2601 return 0;
2602 }
2603 }
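/* Illustrative note: for an unsigned SSA name X about which nothing is
   known, the rules above compose as

     tree_ctz (x * 8)        == tree_ctz (x) + 3 == 3
     tree_ctz ((x * 8) + 16) == MIN (tree_ctz (x * 8), tree_ctz (16))
                             == MIN (3, 4) == 3

   so a caller may conclude that (x * 8) + 16 is a multiple of 8 without
   any further information about X.  */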
2604
2605 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2606 decimal float constants, so don't return 1 for them. */
2607
2608 int
2609 real_zerop (const_tree expr)
2610 {
2611 switch (TREE_CODE (expr))
2612 {
2613 case REAL_CST:
2614 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2616 case COMPLEX_CST:
2617 return real_zerop (TREE_REALPART (expr))
2618 && real_zerop (TREE_IMAGPART (expr));
2619 case VECTOR_CST:
2620 {
2621 unsigned i;
2622 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2623 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2624 return false;
2625 return true;
2626 }
2627 default:
2628 return false;
2629 }
2630 }
2631
2632 /* Return 1 if EXPR is the real constant one in real or complex form.
2633 Trailing zeroes matter for decimal float constants, so don't return
2634 1 for them. */
2635
2636 int
2637 real_onep (const_tree expr)
2638 {
2639 switch (TREE_CODE (expr))
2640 {
2641 case REAL_CST:
2642 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2643 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2644 case COMPLEX_CST:
2645 return real_onep (TREE_REALPART (expr))
2646 && real_zerop (TREE_IMAGPART (expr));
2647 case VECTOR_CST:
2648 {
2649 unsigned i;
2650 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2651 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2652 return false;
2653 return true;
2654 }
2655 default:
2656 return false;
2657 }
2658 }
2659
2660 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2661 matter for decimal float constants, so don't return 1 for them. */
2662
2663 int
2664 real_minus_onep (const_tree expr)
2665 {
2666 switch (TREE_CODE (expr))
2667 {
2668 case REAL_CST:
2669 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2670 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2671 case COMPLEX_CST:
2672 return real_minus_onep (TREE_REALPART (expr))
2673 && real_zerop (TREE_IMAGPART (expr));
2674 case VECTOR_CST:
2675 {
2676 unsigned i;
2677 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2678 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2679 return false;
2680 return true;
2681 }
2682 default:
2683 return false;
2684 }
2685 }
2686
2687 /* Nonzero if EXP is a constant or a cast of a constant. */
2688
2689 int
2690 really_constant_p (const_tree exp)
2691 {
2692 /* This is not quite the same as STRIP_NOPS. It does more. */
2693 while (CONVERT_EXPR_P (exp)
2694 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2695 exp = TREE_OPERAND (exp, 0);
2696 return TREE_CONSTANT (exp);
2697 }
2698 \f
2699 /* Return first list element whose TREE_VALUE is ELEM.
2700 Return 0 if ELEM is not in LIST. */
2701
2702 tree
2703 value_member (tree elem, tree list)
2704 {
2705 while (list)
2706 {
2707 if (elem == TREE_VALUE (list))
2708 return list;
2709 list = TREE_CHAIN (list);
2710 }
2711 return NULL_TREE;
2712 }
2713
2714 /* Return first list element whose TREE_PURPOSE is ELEM.
2715 Return 0 if ELEM is not in LIST. */
2716
2717 tree
2718 purpose_member (const_tree elem, tree list)
2719 {
2720 while (list)
2721 {
2722 if (elem == TREE_PURPOSE (list))
2723 return list;
2724 list = TREE_CHAIN (list);
2725 }
2726 return NULL_TREE;
2727 }
2728
2729 /* Return true if ELEM is in V. */
2730
2731 bool
2732 vec_member (const_tree elem, vec<tree, va_gc> *v)
2733 {
2734 unsigned ix;
2735 tree t;
2736 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2737 if (elem == t)
2738 return true;
2739 return false;
2740 }
2741
2742 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2743 NULL_TREE. */
2744
2745 tree
2746 chain_index (int idx, tree chain)
2747 {
2748 for (; chain && idx > 0; --idx)
2749 chain = TREE_CHAIN (chain);
2750 return chain;
2751 }
2752
2753 /* Return nonzero if ELEM is part of the chain CHAIN. */
2754
2755 int
2756 chain_member (const_tree elem, const_tree chain)
2757 {
2758 while (chain)
2759 {
2760 if (elem == chain)
2761 return 1;
2762 chain = DECL_CHAIN (chain);
2763 }
2764
2765 return 0;
2766 }
2767
2768 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2769 We expect a null pointer to mark the end of the chain.
2770 This is the Lisp primitive `length'. */
2771
2772 int
2773 list_length (const_tree t)
2774 {
2775 const_tree p = t;
2776 #ifdef ENABLE_TREE_CHECKING
2777 const_tree q = t;
2778 #endif
2779 int len = 0;
2780
2781 while (p)
2782 {
2783 p = TREE_CHAIN (p);
2784 #ifdef ENABLE_TREE_CHECKING
2785 if (len % 2)
2786 q = TREE_CHAIN (q);
2787 gcc_assert (p != q);
2788 #endif
2789 len++;
2790 }
2791
2792 return len;
2793 }
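/* Illustrative note: the checking code above is a tortoise-and-hare
   walk; Q advances one node for every two advances of P, so if the
   chain ever loops back on itself the two pointers must eventually
   coincide and the assertion fires instead of the loop running
   forever.  */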
2794
2795 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2796 UNION_TYPE TYPE, or NULL_TREE if none. */
2797
2798 tree
2799 first_field (const_tree type)
2800 {
2801 tree t = TYPE_FIELDS (type);
2802 while (t && TREE_CODE (t) != FIELD_DECL)
2803 t = TREE_CHAIN (t);
2804 return t;
2805 }
2806
2807 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2808 by modifying the last node in chain 1 to point to chain 2.
2809 This is the Lisp primitive `nconc'. */
2810
2811 tree
2812 chainon (tree op1, tree op2)
2813 {
2814 tree t1;
2815
2816 if (!op1)
2817 return op2;
2818 if (!op2)
2819 return op1;
2820
2821 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2822 continue;
2823 TREE_CHAIN (t1) = op2;
2824
2825 #ifdef ENABLE_TREE_CHECKING
2826 {
2827 tree t2;
2828 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2829 gcc_assert (t2 != t1);
2830 }
2831 #endif
2832
2833 return op1;
2834 }
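/* Illustrative sketch: given two TREE_LIST chains A = (1 2) and
   B = (3 4), chainon (a, b) rewrites the TREE_CHAIN of the last node of
   A in place, so A becomes (1 2 3 4) and is returned.  B must not
   already appear in A, which is what the checking loop above asserts.  */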
2835
2836 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2837
2838 tree
2839 tree_last (tree chain)
2840 {
2841 tree next;
2842 if (chain)
2843 while ((next = TREE_CHAIN (chain)))
2844 chain = next;
2845 return chain;
2846 }
2847
2848 /* Reverse the order of elements in the chain T,
2849 and return the new head of the chain (old last element). */
2850
2851 tree
2852 nreverse (tree t)
2853 {
2854 tree prev = 0, decl, next;
2855 for (decl = t; decl; decl = next)
2856 {
2857 /* We shouldn't be using this function to reverse BLOCK chains; we
2858 have blocks_nreverse for that. */
2859 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2860 next = TREE_CHAIN (decl);
2861 TREE_CHAIN (decl) = prev;
2862 prev = decl;
2863 }
2864 return prev;
2865 }
2866 \f
2867 /* Return a newly created TREE_LIST node whose
2868 purpose and value fields are PARM and VALUE. */
2869
2870 tree
2871 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2872 {
2873 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2874 TREE_PURPOSE (t) = parm;
2875 TREE_VALUE (t) = value;
2876 return t;
2877 }
2878
2879 /* Build a chain of TREE_LIST nodes from a vector. */
2880
2881 tree
2882 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2883 {
2884 tree ret = NULL_TREE;
2885 tree *pp = &ret;
2886 unsigned int i;
2887 tree t;
2888 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2889 {
2890 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2891 pp = &TREE_CHAIN (*pp);
2892 }
2893 return ret;
2894 }
2895
2896 /* Return a newly created TREE_LIST node whose
2897 purpose and value fields are PURPOSE and VALUE
2898 and whose TREE_CHAIN is CHAIN. */
2899
2900 tree
2901 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2902 {
2903 tree node;
2904
2905 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2906 memset (node, 0, sizeof (struct tree_common));
2907
2908 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2909
2910 TREE_SET_CODE (node, TREE_LIST);
2911 TREE_CHAIN (node) = chain;
2912 TREE_PURPOSE (node) = purpose;
2913 TREE_VALUE (node) = value;
2914 return node;
2915 }
2916
2917 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2918 trees. */
2919
2920 vec<tree, va_gc> *
2921 ctor_to_vec (tree ctor)
2922 {
2923 vec<tree, va_gc> *vec;
2924 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2925 unsigned int ix;
2926 tree val;
2927
2928 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2929 vec->quick_push (val);
2930
2931 return vec;
2932 }
2933 \f
2934 /* Return the size nominally occupied by an object of type TYPE
2935 when it resides in memory. The value is measured in units of bytes,
2936 and its data type is that normally used for type sizes
2937 (which is the first type created by make_signed_type or
2938 make_unsigned_type). */
2939
2940 tree
2941 size_in_bytes_loc (location_t loc, const_tree type)
2942 {
2943 tree t;
2944
2945 if (type == error_mark_node)
2946 return integer_zero_node;
2947
2948 type = TYPE_MAIN_VARIANT (type);
2949 t = TYPE_SIZE_UNIT (type);
2950
2951 if (t == 0)
2952 {
2953 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2954 return size_zero_node;
2955 }
2956
2957 return t;
2958 }
2959
2960 /* Return the size of TYPE (in bytes) as a wide integer
2961 or return -1 if the size can vary or is larger than an integer. */
2962
2963 HOST_WIDE_INT
2964 int_size_in_bytes (const_tree type)
2965 {
2966 tree t;
2967
2968 if (type == error_mark_node)
2969 return 0;
2970
2971 type = TYPE_MAIN_VARIANT (type);
2972 t = TYPE_SIZE_UNIT (type);
2973
2974 if (t && tree_fits_uhwi_p (t))
2975 return TREE_INT_CST_LOW (t);
2976 else
2977 return -1;
2978 }
2979
2980 /* Return the maximum size of TYPE (in bytes) as a wide integer
2981 or return -1 if the size can vary or is larger than an integer. */
2982
2983 HOST_WIDE_INT
2984 max_int_size_in_bytes (const_tree type)
2985 {
2986 HOST_WIDE_INT size = -1;
2987 tree size_tree;
2988
2989 /* If this is an array type, check for a possible MAX_SIZE attached. */
2990
2991 if (TREE_CODE (type) == ARRAY_TYPE)
2992 {
2993 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2994
2995 if (size_tree && tree_fits_uhwi_p (size_tree))
2996 size = tree_to_uhwi (size_tree);
2997 }
2998
2999 /* If we still haven't been able to get a size, see if the language
3000 can compute a maximum size. */
3001
3002 if (size == -1)
3003 {
3004 size_tree = lang_hooks.types.max_size (type);
3005
3006 if (size_tree && tree_fits_uhwi_p (size_tree))
3007 size = tree_to_uhwi (size_tree);
3008 }
3009
3010 return size;
3011 }
3012 \f
3013 /* Return the bit position of FIELD, in bits from the start of the record.
3014 This is a tree of type bitsizetype. */
3015
3016 tree
3017 bit_position (const_tree field)
3018 {
3019 return bit_from_pos (DECL_FIELD_OFFSET (field),
3020 DECL_FIELD_BIT_OFFSET (field));
3021 }
3022 \f
3023 /* Return the byte position of FIELD, in bytes from the start of the record.
3024 This is a tree of type sizetype. */
3025
3026 tree
3027 byte_position (const_tree field)
3028 {
3029 return byte_from_pos (DECL_FIELD_OFFSET (field),
3030 DECL_FIELD_BIT_OFFSET (field));
3031 }
3032
3033 /* Likewise, but return as an integer. It must be representable in
3034 that way (since it could be a signed value, we don't have the
3035 option of returning -1 like int_size_in_bytes can). */
3036
3037 HOST_WIDE_INT
3038 int_byte_position (const_tree field)
3039 {
3040 return tree_to_shwi (byte_position (field));
3041 }
3042 \f
3043 /* Return the strictest alignment, in bits, that T is known to have. */
3044
3045 unsigned int
3046 expr_align (const_tree t)
3047 {
3048 unsigned int align0, align1;
3049
3050 switch (TREE_CODE (t))
3051 {
3052 CASE_CONVERT: case NON_LVALUE_EXPR:
3053 /* If we have conversions, we know that the alignment of the
3054 object must meet each of the alignments of the types. */
3055 align0 = expr_align (TREE_OPERAND (t, 0));
3056 align1 = TYPE_ALIGN (TREE_TYPE (t));
3057 return MAX (align0, align1);
3058
3059 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3060 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3061 case CLEANUP_POINT_EXPR:
3062 /* These don't change the alignment of an object. */
3063 return expr_align (TREE_OPERAND (t, 0));
3064
3065 case COND_EXPR:
3066 /* The best we can do is say that the alignment is the least aligned
3067 of the two arms. */
3068 align0 = expr_align (TREE_OPERAND (t, 1));
3069 align1 = expr_align (TREE_OPERAND (t, 2));
3070 return MIN (align0, align1);
3071
3072 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3073 meaningfully, it's always 1. */
3074 case LABEL_DECL: case CONST_DECL:
3075 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3076 case FUNCTION_DECL:
3077 gcc_assert (DECL_ALIGN (t) != 0);
3078 return DECL_ALIGN (t);
3079
3080 default:
3081 break;
3082 }
3083
3084 /* Otherwise take the alignment from that of the type. */
3085 return TYPE_ALIGN (TREE_TYPE (t));
3086 }
3087 \f
3088 /* Return, as a tree node, the number of elements for TYPE (which is an
3089 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3090
3091 tree
3092 array_type_nelts (const_tree type)
3093 {
3094 tree index_type, min, max;
3095
3096 /* If they did it with unspecified bounds, then we should have already
3097 given an error about it before we got here. */
3098 if (! TYPE_DOMAIN (type))
3099 return error_mark_node;
3100
3101 index_type = TYPE_DOMAIN (type);
3102 min = TYPE_MIN_VALUE (index_type);
3103 max = TYPE_MAX_VALUE (index_type);
3104
3105 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3106 if (!max)
3107 return error_mark_node;
3108
3109 return (integer_zerop (min)
3110 ? max
3111 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3112 }
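/* Illustrative note: for the C type "int a[10]" the domain is [0, 9],
   so array_type_nelts returns the INTEGER_CST 9; a caller that wants
   the element count itself must add one to the result.  */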
3113 \f
3114 /* If arg is static -- a reference to an object in static storage -- then
3115 return the object. This is not the same as the C meaning of `static'.
3116 If arg isn't static, return NULL. */
3117
3118 tree
3119 staticp (tree arg)
3120 {
3121 switch (TREE_CODE (arg))
3122 {
3123 case FUNCTION_DECL:
3124 /* Nested functions are static, even though taking their address will
3125 involve a trampoline as we unnest the nested function and create
3126 the trampoline on the tree level. */
3127 return arg;
3128
3129 case VAR_DECL:
3130 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3131 && ! DECL_THREAD_LOCAL_P (arg)
3132 && ! DECL_DLLIMPORT_P (arg)
3133 ? arg : NULL);
3134
3135 case CONST_DECL:
3136 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3137 ? arg : NULL);
3138
3139 case CONSTRUCTOR:
3140 return TREE_STATIC (arg) ? arg : NULL;
3141
3142 case LABEL_DECL:
3143 case STRING_CST:
3144 return arg;
3145
3146 case COMPONENT_REF:
3147 /* If the thing being referenced is not a field, then it is
3148 something language specific. */
3149 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3150
3151 /* If we are referencing a bitfield, we can't evaluate an
3152 ADDR_EXPR at compile time and so it isn't a constant. */
3153 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3154 return NULL;
3155
3156 return staticp (TREE_OPERAND (arg, 0));
3157
3158 case BIT_FIELD_REF:
3159 return NULL;
3160
3161 case INDIRECT_REF:
3162 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3163
3164 case ARRAY_REF:
3165 case ARRAY_RANGE_REF:
3166 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3167 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3168 return staticp (TREE_OPERAND (arg, 0));
3169 else
3170 return NULL;
3171
3172 case COMPOUND_LITERAL_EXPR:
3173 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3174
3175 default:
3176 return NULL;
3177 }
3178 }
3179
3180 \f
3181
3182
3183 /* Return whether OP is a DECL whose address is function-invariant. */
3184
3185 bool
3186 decl_address_invariant_p (const_tree op)
3187 {
3188 /* The conditions below are slightly less strict than the ones in
3189 staticp. */
3190
3191 switch (TREE_CODE (op))
3192 {
3193 case PARM_DECL:
3194 case RESULT_DECL:
3195 case LABEL_DECL:
3196 case FUNCTION_DECL:
3197 return true;
3198
3199 case VAR_DECL:
3200 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3201 || DECL_THREAD_LOCAL_P (op)
3202 || DECL_CONTEXT (op) == current_function_decl
3203 || decl_function_context (op) == current_function_decl)
3204 return true;
3205 break;
3206
3207 case CONST_DECL:
3208 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3209 || decl_function_context (op) == current_function_decl)
3210 return true;
3211 break;
3212
3213 default:
3214 break;
3215 }
3216
3217 return false;
3218 }
3219
3220 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3221
3222 bool
3223 decl_address_ip_invariant_p (const_tree op)
3224 {
3225 /* The conditions below are slightly less strict than the ones in
3226 staticp. */
3227
3228 switch (TREE_CODE (op))
3229 {
3230 case LABEL_DECL:
3231 case FUNCTION_DECL:
3232 case STRING_CST:
3233 return true;
3234
3235 case VAR_DECL:
3236 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3237 && !DECL_DLLIMPORT_P (op))
3238 || DECL_THREAD_LOCAL_P (op))
3239 return true;
3240 break;
3241
3242 case CONST_DECL:
3243 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3244 return true;
3245 break;
3246
3247 default:
3248 break;
3249 }
3250
3251 return false;
3252 }
3253
3254
3255 /* Return true if T is function-invariant (internal function, does
3256 not handle arithmetic; that's handled in skip_simple_arithmetic and
3257 tree_invariant_p). */
3258
3259 static bool
3260 tree_invariant_p_1 (tree t)
3261 {
3262 tree op;
3263
3264 if (TREE_CONSTANT (t)
3265 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3266 return true;
3267
3268 switch (TREE_CODE (t))
3269 {
3270 case SAVE_EXPR:
3271 return true;
3272
3273 case ADDR_EXPR:
3274 op = TREE_OPERAND (t, 0);
3275 while (handled_component_p (op))
3276 {
3277 switch (TREE_CODE (op))
3278 {
3279 case ARRAY_REF:
3280 case ARRAY_RANGE_REF:
3281 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3282 || TREE_OPERAND (op, 2) != NULL_TREE
3283 || TREE_OPERAND (op, 3) != NULL_TREE)
3284 return false;
3285 break;
3286
3287 case COMPONENT_REF:
3288 if (TREE_OPERAND (op, 2) != NULL_TREE)
3289 return false;
3290 break;
3291
3292 default:;
3293 }
3294 op = TREE_OPERAND (op, 0);
3295 }
3296
3297 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3298
3299 default:
3300 break;
3301 }
3302
3303 return false;
3304 }
3305
3306 /* Return true if T is function-invariant. */
3307
3308 bool
3309 tree_invariant_p (tree t)
3310 {
3311 tree inner = skip_simple_arithmetic (t);
3312 return tree_invariant_p_1 (inner);
3313 }
3314
3315 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3316 Do this to any expression which may be used in more than one place,
3317 but must be evaluated only once.
3318
3319 Normally, expand_expr would reevaluate the expression each time.
3320 Calling save_expr produces something that is evaluated and recorded
3321 the first time expand_expr is called on it. Subsequent calls to
3322 expand_expr just reuse the recorded value.
3323
3324 The call to expand_expr that generates code that actually computes
3325 the value is the first call *at compile time*. Subsequent calls
3326 *at compile time* generate code to use the saved value.
3327 This produces correct result provided that *at run time* control
3328 always flows through the insns made by the first expand_expr
3329 before reaching the other places where the save_expr was evaluated.
3330 You, the caller of save_expr, must make sure this is so.
3331
3332 Constants, and certain read-only nodes, are returned with no
3333 SAVE_EXPR because that is safe. Expressions containing placeholders
3334 are not touched; see tree.def for an explanation of what these
3335 are used for. */
3336
3337 tree
3338 save_expr (tree expr)
3339 {
3340 tree t = fold (expr);
3341 tree inner;
3342
3343 /* If the tree evaluates to a constant, then we don't want to hide that
3344 fact (i.e. this allows further folding, and direct checks for constants).
3345 However, a read-only object that has side effects cannot be bypassed.
3346 Since it is no problem to reevaluate literals, we just return the
3347 literal node. */
3348 inner = skip_simple_arithmetic (t);
3349 if (TREE_CODE (inner) == ERROR_MARK)
3350 return inner;
3351
3352 if (tree_invariant_p_1 (inner))
3353 return t;
3354
3355 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3356 it means that the size or offset of some field of an object depends on
3357 the value within another field.
3358
3359 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3360 and some variable since it would then need to be both evaluated once and
3361 evaluated more than once. Front-ends must assure this case cannot
3362 happen by surrounding any such subexpressions in their own SAVE_EXPR
3363 and forcing evaluation at the proper time. */
3364 if (contains_placeholder_p (inner))
3365 return t;
3366
3367 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3368 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3369
3370 /* This expression might be placed ahead of a jump to ensure that the
3371 value was computed on both sides of the jump. So make sure it isn't
3372 eliminated as dead. */
3373 TREE_SIDE_EFFECTS (t) = 1;
3374 return t;
3375 }
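/* Illustrative sketch: a front end that needs to use one call result
   twice, say to expand "f (x + y) * f (x + y)" with a single call,
   might write (CALL here is assumed to be the CALL_EXPR for f (x + y))

     tree saved = save_expr (call);
     tree prod = build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);

   Both operands then share one SAVE_EXPR, so the call is evaluated once
   and its recorded value is reused.  */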
3376
3377 /* Look inside EXPR into any simple arithmetic operations. Return the
3378 outermost non-arithmetic or non-invariant node. */
3379
3380 tree
3381 skip_simple_arithmetic (tree expr)
3382 {
3383 /* We don't care about whether this can be used as an lvalue in this
3384 context. */
3385 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3386 expr = TREE_OPERAND (expr, 0);
3387
3388 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3389 a constant, it will be more efficient to not make another SAVE_EXPR since
3390 it will allow better simplification and GCSE will be able to merge the
3391 computations if they actually occur. */
3392 while (true)
3393 {
3394 if (UNARY_CLASS_P (expr))
3395 expr = TREE_OPERAND (expr, 0);
3396 else if (BINARY_CLASS_P (expr))
3397 {
3398 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3399 expr = TREE_OPERAND (expr, 0);
3400 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3401 expr = TREE_OPERAND (expr, 1);
3402 else
3403 break;
3404 }
3405 else
3406 break;
3407 }
3408
3409 return expr;
3410 }
3411
3412 /* Look inside EXPR into simple arithmetic operations involving constants.
3413 Return the outermost non-arithmetic or non-constant node. */
3414
3415 tree
3416 skip_simple_constant_arithmetic (tree expr)
3417 {
3418 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3419 expr = TREE_OPERAND (expr, 0);
3420
3421 while (true)
3422 {
3423 if (UNARY_CLASS_P (expr))
3424 expr = TREE_OPERAND (expr, 0);
3425 else if (BINARY_CLASS_P (expr))
3426 {
3427 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3428 expr = TREE_OPERAND (expr, 0);
3429 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3430 expr = TREE_OPERAND (expr, 1);
3431 else
3432 break;
3433 }
3434 else
3435 break;
3436 }
3437
3438 return expr;
3439 }
3440
3441 /* Return which tree structure is used by T. */
3442
3443 enum tree_node_structure_enum
3444 tree_node_structure (const_tree t)
3445 {
3446 const enum tree_code code = TREE_CODE (t);
3447 return tree_node_structure_for_code (code);
3448 }
3449
3450 /* Set various status flags when building a CALL_EXPR object T. */
3451
3452 static void
3453 process_call_operands (tree t)
3454 {
3455 bool side_effects = TREE_SIDE_EFFECTS (t);
3456 bool read_only = false;
3457 int i = call_expr_flags (t);
3458
3459 /* Calls have side-effects, except those to const or pure functions. */
3460 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3461 side_effects = true;
3462 /* Propagate TREE_READONLY of arguments for const functions. */
3463 if (i & ECF_CONST)
3464 read_only = true;
3465
3466 if (!side_effects || read_only)
3467 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3468 {
3469 tree op = TREE_OPERAND (t, i);
3470 if (op && TREE_SIDE_EFFECTS (op))
3471 side_effects = true;
3472 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3473 read_only = false;
3474 }
3475
3476 TREE_SIDE_EFFECTS (t) = side_effects;
3477 TREE_READONLY (t) = read_only;
3478 }
3479 \f
3480 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3481 size or offset that depends on a field within a record. */
3482
3483 bool
3484 contains_placeholder_p (const_tree exp)
3485 {
3486 enum tree_code code;
3487
3488 if (!exp)
3489 return 0;
3490
3491 code = TREE_CODE (exp);
3492 if (code == PLACEHOLDER_EXPR)
3493 return 1;
3494
3495 switch (TREE_CODE_CLASS (code))
3496 {
3497 case tcc_reference:
3498 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3499 position computations since they will be converted into a
3500 WITH_RECORD_EXPR involving the reference, which we assume
3501 here will be valid. */
3502 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3503
3504 case tcc_exceptional:
3505 if (code == TREE_LIST)
3506 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3507 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3508 break;
3509
3510 case tcc_unary:
3511 case tcc_binary:
3512 case tcc_comparison:
3513 case tcc_expression:
3514 switch (code)
3515 {
3516 case COMPOUND_EXPR:
3517 /* Ignoring the first operand isn't quite right, but works best. */
3518 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3519
3520 case COND_EXPR:
3521 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3522 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3523 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3524
3525 case SAVE_EXPR:
3526 /* The save_expr function never wraps anything containing
3527 a PLACEHOLDER_EXPR. */
3528 return 0;
3529
3530 default:
3531 break;
3532 }
3533
3534 switch (TREE_CODE_LENGTH (code))
3535 {
3536 case 1:
3537 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3538 case 2:
3539 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3540 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3541 default:
3542 return 0;
3543 }
3544
3545 case tcc_vl_exp:
3546 switch (code)
3547 {
3548 case CALL_EXPR:
3549 {
3550 const_tree arg;
3551 const_call_expr_arg_iterator iter;
3552 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3553 if (CONTAINS_PLACEHOLDER_P (arg))
3554 return 1;
3555 return 0;
3556 }
3557 default:
3558 return 0;
3559 }
3560
3561 default:
3562 return 0;
3563 }
3564 return 0;
3565 }
3566
3567 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3568 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3569 field positions. */
3570
3571 static bool
3572 type_contains_placeholder_1 (const_tree type)
3573 {
3574 /* If the size contains a placeholder or the parent type (component type in
3575 the case of arrays) type involves a placeholder, this type does. */
3576 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3577 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3578 || (!POINTER_TYPE_P (type)
3579 && TREE_TYPE (type)
3580 && type_contains_placeholder_p (TREE_TYPE (type))))
3581 return true;
3582
3583 /* Now do type-specific checks. Note that the last part of the check above
3584 greatly limits what we have to do below. */
3585 switch (TREE_CODE (type))
3586 {
3587 case VOID_TYPE:
3588 case POINTER_BOUNDS_TYPE:
3589 case COMPLEX_TYPE:
3590 case ENUMERAL_TYPE:
3591 case BOOLEAN_TYPE:
3592 case POINTER_TYPE:
3593 case OFFSET_TYPE:
3594 case REFERENCE_TYPE:
3595 case METHOD_TYPE:
3596 case FUNCTION_TYPE:
3597 case VECTOR_TYPE:
3598 case NULLPTR_TYPE:
3599 return false;
3600
3601 case INTEGER_TYPE:
3602 case REAL_TYPE:
3603 case FIXED_POINT_TYPE:
3604 /* Here we just check the bounds. */
3605 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3606 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3607
3608 case ARRAY_TYPE:
3609 /* We have already checked the component type above, so just check
3610 the domain type. Flexible array members have a null domain. */
3611 return TYPE_DOMAIN (type) ?
3612 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3613
3614 case RECORD_TYPE:
3615 case UNION_TYPE:
3616 case QUAL_UNION_TYPE:
3617 {
3618 tree field;
3619
3620 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3621 if (TREE_CODE (field) == FIELD_DECL
3622 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3623 || (TREE_CODE (type) == QUAL_UNION_TYPE
3624 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3625 || type_contains_placeholder_p (TREE_TYPE (field))))
3626 return true;
3627
3628 return false;
3629 }
3630
3631 default:
3632 gcc_unreachable ();
3633 }
3634 }
3635
3636 /* Wrapper around above function used to cache its result. */
3637
3638 bool
3639 type_contains_placeholder_p (tree type)
3640 {
3641 bool result;
3642
3643 /* If the contains_placeholder_bits field has been initialized,
3644 then we know the answer. */
3645 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3646 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3647
3648 /* Indicate that we've seen this type node, and the answer is false.
3649 This is what we want to return if we run into recursion via fields. */
3650 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3651
3652 /* Compute the real value. */
3653 result = type_contains_placeholder_1 (type);
3654
3655 /* Store the real value. */
3656 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3657
3658 return result;
3659 }
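/* Illustrative note: the cache treats TYPE_CONTAINS_PLACEHOLDER_INTERNAL
   as a ternary field (0 = not computed yet, 1 = computed false,
   2 = computed true), which is why the function stores RESULT + 1 and
   returns the cached value minus one.  Setting the field to 1 before the
   recursive computation makes self-referential types read as false while
   they are being processed.  */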
3660 \f
3661 /* Push tree EXP onto vector QUEUE if it is not already present. */
3662
3663 static void
3664 push_without_duplicates (tree exp, vec<tree> *queue)
3665 {
3666 unsigned int i;
3667 tree iter;
3668
3669 FOR_EACH_VEC_ELT (*queue, i, iter)
3670 if (simple_cst_equal (iter, exp) == 1)
3671 break;
3672
3673 if (!iter)
3674 queue->safe_push (exp);
3675 }
3676
3677 /* Given a tree EXP, find all occurrences of references to fields
3678 in a PLACEHOLDER_EXPR and place them in vector REFS without
3679 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3680 we assume here that EXP contains only arithmetic expressions
3681 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3682 argument list. */
3683
3684 void
3685 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3686 {
3687 enum tree_code code = TREE_CODE (exp);
3688 tree inner;
3689 int i;
3690
3691 /* We handle TREE_LIST and COMPONENT_REF separately. */
3692 if (code == TREE_LIST)
3693 {
3694 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3695 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3696 }
3697 else if (code == COMPONENT_REF)
3698 {
3699 for (inner = TREE_OPERAND (exp, 0);
3700 REFERENCE_CLASS_P (inner);
3701 inner = TREE_OPERAND (inner, 0))
3702 ;
3703
3704 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3705 push_without_duplicates (exp, refs);
3706 else
3707 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3708 }
3709 else
3710 switch (TREE_CODE_CLASS (code))
3711 {
3712 case tcc_constant:
3713 break;
3714
3715 case tcc_declaration:
3716 /* Variables allocated to static storage can stay. */
3717 if (!TREE_STATIC (exp))
3718 push_without_duplicates (exp, refs);
3719 break;
3720
3721 case tcc_expression:
3722 /* This is the pattern built in ada/make_aligning_type. */
3723 if (code == ADDR_EXPR
3724 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3725 {
3726 push_without_duplicates (exp, refs);
3727 break;
3728 }
3729
3730 /* Fall through... */
3731
3732 case tcc_exceptional:
3733 case tcc_unary:
3734 case tcc_binary:
3735 case tcc_comparison:
3736 case tcc_reference:
3737 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3738 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3739 break;
3740
3741 case tcc_vl_exp:
3742 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3743 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3744 break;
3745
3746 default:
3747 gcc_unreachable ();
3748 }
3749 }
3750
3751 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3752 return a tree with all occurrences of references to F in a
3753 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3754 CONST_DECLs. Note that we assume here that EXP contains only
3755 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3756 occurring only in their argument list. */
3757
3758 tree
3759 substitute_in_expr (tree exp, tree f, tree r)
3760 {
3761 enum tree_code code = TREE_CODE (exp);
3762 tree op0, op1, op2, op3;
3763 tree new_tree;
3764
3765 /* We handle TREE_LIST and COMPONENT_REF separately. */
3766 if (code == TREE_LIST)
3767 {
3768 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3769 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3770 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3771 return exp;
3772
3773 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3774 }
3775 else if (code == COMPONENT_REF)
3776 {
3777 tree inner;
3778
3779 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3780 and it is the right field, replace it with R. */
3781 for (inner = TREE_OPERAND (exp, 0);
3782 REFERENCE_CLASS_P (inner);
3783 inner = TREE_OPERAND (inner, 0))
3784 ;
3785
3786 /* The field. */
3787 op1 = TREE_OPERAND (exp, 1);
3788
3789 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3790 return r;
3791
3792 /* If this expression hasn't been completed yet, leave it alone. */
3793 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3794 return exp;
3795
3796 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3797 if (op0 == TREE_OPERAND (exp, 0))
3798 return exp;
3799
3800 new_tree
3801 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3802 }
3803 else
3804 switch (TREE_CODE_CLASS (code))
3805 {
3806 case tcc_constant:
3807 return exp;
3808
3809 case tcc_declaration:
3810 if (exp == f)
3811 return r;
3812 else
3813 return exp;
3814
3815 case tcc_expression:
3816 if (exp == f)
3817 return r;
3818
3819 /* Fall through... */
3820
3821 case tcc_exceptional:
3822 case tcc_unary:
3823 case tcc_binary:
3824 case tcc_comparison:
3825 case tcc_reference:
3826 switch (TREE_CODE_LENGTH (code))
3827 {
3828 case 0:
3829 return exp;
3830
3831 case 1:
3832 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3833 if (op0 == TREE_OPERAND (exp, 0))
3834 return exp;
3835
3836 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3837 break;
3838
3839 case 2:
3840 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3841 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3842
3843 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3844 return exp;
3845
3846 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3847 break;
3848
3849 case 3:
3850 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3851 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3852 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3853
3854 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3855 && op2 == TREE_OPERAND (exp, 2))
3856 return exp;
3857
3858 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3859 break;
3860
3861 case 4:
3862 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3863 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3864 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3865 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3866
3867 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3868 && op2 == TREE_OPERAND (exp, 2)
3869 && op3 == TREE_OPERAND (exp, 3))
3870 return exp;
3871
3872 new_tree
3873 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3874 break;
3875
3876 default:
3877 gcc_unreachable ();
3878 }
3879 break;
3880
3881 case tcc_vl_exp:
3882 {
3883 int i;
3884
3885 new_tree = NULL_TREE;
3886
3887 /* If we are trying to replace F with a constant, inline back
3888 functions which do nothing else than computing a value from
3889 the arguments they are passed. This makes it possible to
3890 fold partially or entirely the replacement expression. */
3891 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3892 {
3893 tree t = maybe_inline_call_in_expr (exp);
3894 if (t)
3895 return SUBSTITUTE_IN_EXPR (t, f, r);
3896 }
3897
3898 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3899 {
3900 tree op = TREE_OPERAND (exp, i);
3901 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3902 if (new_op != op)
3903 {
3904 if (!new_tree)
3905 new_tree = copy_node (exp);
3906 TREE_OPERAND (new_tree, i) = new_op;
3907 }
3908 }
3909
3910 if (new_tree)
3911 {
3912 new_tree = fold (new_tree);
3913 if (TREE_CODE (new_tree) == CALL_EXPR)
3914 process_call_operands (new_tree);
3915 }
3916 else
3917 return exp;
3918 }
3919 break;
3920
3921 default:
3922 gcc_unreachable ();
3923 }
3924
3925 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3926
3927 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3928 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3929
3930 return new_tree;
3931 }
3932
3933 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3934 for it within OBJ, a tree that is an object or a chain of references. */
3935
3936 tree
3937 substitute_placeholder_in_expr (tree exp, tree obj)
3938 {
3939 enum tree_code code = TREE_CODE (exp);
3940 tree op0, op1, op2, op3;
3941 tree new_tree;
3942
3943 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3944 in the chain of OBJ. */
3945 if (code == PLACEHOLDER_EXPR)
3946 {
3947 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3948 tree elt;
3949
3950 for (elt = obj; elt != 0;
3951 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3952 || TREE_CODE (elt) == COND_EXPR)
3953 ? TREE_OPERAND (elt, 1)
3954 : (REFERENCE_CLASS_P (elt)
3955 || UNARY_CLASS_P (elt)
3956 || BINARY_CLASS_P (elt)
3957 || VL_EXP_CLASS_P (elt)
3958 || EXPRESSION_CLASS_P (elt))
3959 ? TREE_OPERAND (elt, 0) : 0))
3960 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3961 return elt;
3962
3963 for (elt = obj; elt != 0;
3964 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3965 || TREE_CODE (elt) == COND_EXPR)
3966 ? TREE_OPERAND (elt, 1)
3967 : (REFERENCE_CLASS_P (elt)
3968 || UNARY_CLASS_P (elt)
3969 || BINARY_CLASS_P (elt)
3970 || VL_EXP_CLASS_P (elt)
3971 || EXPRESSION_CLASS_P (elt))
3972 ? TREE_OPERAND (elt, 0) : 0))
3973 if (POINTER_TYPE_P (TREE_TYPE (elt))
3974 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3975 == need_type))
3976 return fold_build1 (INDIRECT_REF, need_type, elt);
3977
3978 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3979 survives until RTL generation, there will be an error. */
3980 return exp;
3981 }
3982
3983 /* TREE_LIST is special because we need to look at TREE_VALUE
3984 and TREE_CHAIN, not TREE_OPERANDS. */
3985 else if (code == TREE_LIST)
3986 {
3987 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3988 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3989 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3990 return exp;
3991
3992 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3993 }
3994 else
3995 switch (TREE_CODE_CLASS (code))
3996 {
3997 case tcc_constant:
3998 case tcc_declaration:
3999 return exp;
4000
4001 case tcc_exceptional:
4002 case tcc_unary:
4003 case tcc_binary:
4004 case tcc_comparison:
4005 case tcc_expression:
4006 case tcc_reference:
4007 case tcc_statement:
4008 switch (TREE_CODE_LENGTH (code))
4009 {
4010 case 0:
4011 return exp;
4012
4013 case 1:
4014 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4015 if (op0 == TREE_OPERAND (exp, 0))
4016 return exp;
4017
4018 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4019 break;
4020
4021 case 2:
4022 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4023 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4024
4025 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4026 return exp;
4027
4028 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4029 break;
4030
4031 case 3:
4032 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4033 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4034 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4035
4036 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4037 && op2 == TREE_OPERAND (exp, 2))
4038 return exp;
4039
4040 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4041 break;
4042
4043 case 4:
4044 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4045 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4046 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4047 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4048
4049 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4050 && op2 == TREE_OPERAND (exp, 2)
4051 && op3 == TREE_OPERAND (exp, 3))
4052 return exp;
4053
4054 new_tree
4055 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4056 break;
4057
4058 default:
4059 gcc_unreachable ();
4060 }
4061 break;
4062
4063 case tcc_vl_exp:
4064 {
4065 int i;
4066
4067 new_tree = NULL_TREE;
4068
4069 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4070 {
4071 tree op = TREE_OPERAND (exp, i);
4072 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4073 if (new_op != op)
4074 {
4075 if (!new_tree)
4076 new_tree = copy_node (exp);
4077 TREE_OPERAND (new_tree, i) = new_op;
4078 }
4079 }
4080
4081 if (new_tree)
4082 {
4083 new_tree = fold (new_tree);
4084 if (TREE_CODE (new_tree) == CALL_EXPR)
4085 process_call_operands (new_tree);
4086 }
4087 else
4088 return exp;
4089 }
4090 break;
4091
4092 default:
4093 gcc_unreachable ();
4094 }
4095
4096 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4097
4098 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4099 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4100
4101 return new_tree;
4102 }
4103 \f
4104
4105 /* Subroutine of stabilize_reference; this is called for subtrees of
4106 references. Any expression with side-effects must be put in a SAVE_EXPR
4107 to ensure that it is only evaluated once.
4108
4109 We don't put SAVE_EXPR nodes around everything, because assigning very
4110 simple expressions to temporaries causes us to miss good opportunities
4111 for optimizations. Among other things, the opportunity to fold in the
4112 addition of a constant into an addressing mode often gets lost, e.g.
4113 "y[i+1] += x;". In general, we take the approach that we should not make
4114 an assignment unless we are forced into it - i.e., that any side-effect-free
4115 operator should be allowed, and that CSE should take care of coalescing
4116 multiple occurrences of the same expression should that prove fruitful. */
4117
4118 static tree
4119 stabilize_reference_1 (tree e)
4120 {
4121 tree result;
4122 enum tree_code code = TREE_CODE (e);
4123
4124 /* We cannot ignore const expressions because they might be references
4125 to const arrays whose indexes contain side-effects. But we can
4126 ignore things that are actually constant or that have already been
4127 handled by this function. */
4128
4129 if (tree_invariant_p (e))
4130 return e;
4131
4132 switch (TREE_CODE_CLASS (code))
4133 {
4134 case tcc_exceptional:
4135 case tcc_type:
4136 case tcc_declaration:
4137 case tcc_comparison:
4138 case tcc_statement:
4139 case tcc_expression:
4140 case tcc_reference:
4141 case tcc_vl_exp:
4142 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4143 so that it will only be evaluated once. */
4144 /* The reference (r) and comparison (<) classes could be handled as
4145 below, but it is generally faster to only evaluate them once. */
4146 if (TREE_SIDE_EFFECTS (e))
4147 return save_expr (e);
4148 return e;
4149
4150 case tcc_constant:
4151 /* Constants need no processing. In fact, we should never reach
4152 here. */
4153 return e;
4154
4155 case tcc_binary:
4156 /* Division is slow and tends to be compiled with jumps,
4157 especially the division by powers of 2 that is often
4158 found inside of an array reference. So do it just once. */
4159 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4160 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4161 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4162 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4163 return save_expr (e);
4164 /* Recursively stabilize each operand. */
4165 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4167 break;
4168
4169 case tcc_unary:
4170 /* Recursively stabilize each operand. */
4171 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4172 break;
4173
4174 default:
4175 gcc_unreachable ();
4176 }
4177
4178 TREE_TYPE (result) = TREE_TYPE (e);
4179 TREE_READONLY (result) = TREE_READONLY (e);
4180 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4181 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4182
4183 return result;
4184 }
4185
4186 /* Stabilize a reference so that we can use it any number of times
4187 without causing its operands to be evaluated more than once.
4188 Returns the stabilized reference. This works by means of save_expr,
4189 so see the caveats in the comments about save_expr.
4190
4191 Also allows conversion expressions whose operands are references.
4192 Any other kind of expression is returned unchanged. */
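/* As an illustrative sketch (REF is a hypothetical reference tree, not taken
   from this file), a front end expanding a compound assignment whose index
   has side effects might do:

     tree lhs = stabilize_reference (ref);
     tree one = build_int_cst (TREE_TYPE (lhs), 1);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, one);
     tree mod = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   so that LHS can appear both as the load and as the store target without
   evaluating the index expression twice.  */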
4193
4194 tree
4195 stabilize_reference (tree ref)
4196 {
4197 tree result;
4198 enum tree_code code = TREE_CODE (ref);
4199
4200 switch (code)
4201 {
4202 case VAR_DECL:
4203 case PARM_DECL:
4204 case RESULT_DECL:
4205 /* No action is needed in this case. */
4206 return ref;
4207
4208 CASE_CONVERT:
4209 case FLOAT_EXPR:
4210 case FIX_TRUNC_EXPR:
4211 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4212 break;
4213
4214 case INDIRECT_REF:
4215 result = build_nt (INDIRECT_REF,
4216 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4217 break;
4218
4219 case COMPONENT_REF:
4220 result = build_nt (COMPONENT_REF,
4221 stabilize_reference (TREE_OPERAND (ref, 0)),
4222 TREE_OPERAND (ref, 1), NULL_TREE);
4223 break;
4224
4225 case BIT_FIELD_REF:
4226 result = build_nt (BIT_FIELD_REF,
4227 stabilize_reference (TREE_OPERAND (ref, 0)),
4228 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4229 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4230 break;
4231
4232 case ARRAY_REF:
4233 result = build_nt (ARRAY_REF,
4234 stabilize_reference (TREE_OPERAND (ref, 0)),
4235 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4236 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4237 break;
4238
4239 case ARRAY_RANGE_REF:
4240 result = build_nt (ARRAY_RANGE_REF,
4241 stabilize_reference (TREE_OPERAND (ref, 0)),
4242 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4243 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4244 break;
4245
4246 case COMPOUND_EXPR:
4247 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4248 it wouldn't be ignored. This matters when dealing with
4249 volatiles. */
4250 return stabilize_reference_1 (ref);
4251
4252 /* If arg isn't a kind of lvalue we recognize, make no change.
4253 Caller should recognize the error for an invalid lvalue. */
4254 default:
4255 return ref;
4256
4257 case ERROR_MARK:
4258 return error_mark_node;
4259 }
4260
4261 TREE_TYPE (result) = TREE_TYPE (ref);
4262 TREE_READONLY (result) = TREE_READONLY (ref);
4263 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4264 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4265
4266 return result;
4267 }
4268 \f
4269 /* Low-level constructors for expressions. */
4270
4271 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4272 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
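/* A caller that rewrites the operand of an existing ADDR_EXPR is expected to
   re-run this afterwards, e.g. (illustrative sketch; T and NEW_BASE are
   hypothetical):

     TREE_OPERAND (t, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (t);

   so that TREE_CONSTANT and TREE_SIDE_EFFECTS reflect the new operand.  */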
4273
4274 void
4275 recompute_tree_invariant_for_addr_expr (tree t)
4276 {
4277 tree node;
4278 bool tc = true, se = false;
4279
4280 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4281
4282 /* We start out assuming this address is both invariant and constant and
4283 has no side effects. Now go down any handled components and see if
4284 any of them involve offsets that are either non-constant or non-invariant.
4285 Also check for side-effects.
4286
4287 ??? Note that this code makes no attempt to deal with the case where
4288 taking the address of something causes a copy due to misalignment. */
4289
4290 #define UPDATE_FLAGS(NODE) \
4291 do { tree _node = (NODE); \
4292 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4293 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4294
4295 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4296 node = TREE_OPERAND (node, 0))
4297 {
4298 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4299 array reference (probably made temporarily by the G++ front end),
4300 so ignore all the operands. */
4301 if ((TREE_CODE (node) == ARRAY_REF
4302 || TREE_CODE (node) == ARRAY_RANGE_REF)
4303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4304 {
4305 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4306 if (TREE_OPERAND (node, 2))
4307 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4308 if (TREE_OPERAND (node, 3))
4309 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4310 }
4311 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4312 FIELD_DECL, apparently. The G++ front end can put something else
4313 there, at least temporarily. */
4314 else if (TREE_CODE (node) == COMPONENT_REF
4315 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4316 {
4317 if (TREE_OPERAND (node, 2))
4318 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4319 }
4320 }
4321
4322 node = lang_hooks.expr_to_decl (node, &tc, &se);
4323
4324 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4325 the address, since &(*a)->b is a form of addition. If it's a constant, the
4326 address is constant too. If it's a decl, its address is constant if the
4327 decl is static. Everything else is not constant and, furthermore,
4328 taking the address of a volatile variable is not volatile. */
4329 if (TREE_CODE (node) == INDIRECT_REF
4330 || TREE_CODE (node) == MEM_REF)
4331 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4332 else if (CONSTANT_CLASS_P (node))
4333 ;
4334 else if (DECL_P (node))
4335 tc &= (staticp (node) != NULL_TREE);
4336 else
4337 {
4338 tc = false;
4339 se |= TREE_SIDE_EFFECTS (node);
4340 }
4341
4342
4343 TREE_CONSTANT (t) = tc;
4344 TREE_SIDE_EFFECTS (t) = se;
4345 #undef UPDATE_FLAGS
4346 }
4347
4348 /* Build an expression of code CODE, data type TYPE, and operands as
4349 specified. Expressions and reference nodes can be created this way.
4350 Constants, decls, types and misc nodes cannot be.
4351
4352 We define six non-variadic functions, from 0 to 5 arguments. This is
4353 enough for all extant tree codes. */
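/* For instance (an illustrative sketch; A and B are hypothetical trees of
   type integer_type_node):

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree neg = build1 (NEGATE_EXPR, integer_type_node, sum);

   Unlike the fold_buildN variants, these routines only allocate and
   initialize the node; they do not attempt any simplification.  */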
4354
4355 tree
4356 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4357 {
4358 tree t;
4359
4360 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4361
4362 t = make_node_stat (code PASS_MEM_STAT);
4363 TREE_TYPE (t) = tt;
4364
4365 return t;
4366 }
4367
4368 tree
4369 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4370 {
4371 int length = sizeof (struct tree_exp);
4372 tree t;
4373
4374 record_node_allocation_statistics (code, length);
4375
4376 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4377
4378 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4379
4380 memset (t, 0, sizeof (struct tree_common));
4381
4382 TREE_SET_CODE (t, code);
4383
4384 TREE_TYPE (t) = type;
4385 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4386 TREE_OPERAND (t, 0) = node;
4387 if (node && !TYPE_P (node))
4388 {
4389 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4390 TREE_READONLY (t) = TREE_READONLY (node);
4391 }
4392
4393 if (TREE_CODE_CLASS (code) == tcc_statement)
4394 TREE_SIDE_EFFECTS (t) = 1;
4395 else switch (code)
4396 {
4397 case VA_ARG_EXPR:
4398 /* All of these have side-effects, no matter what their
4399 operands are. */
4400 TREE_SIDE_EFFECTS (t) = 1;
4401 TREE_READONLY (t) = 0;
4402 break;
4403
4404 case INDIRECT_REF:
4405 /* Whether a dereference is readonly has nothing to do with whether
4406 its operand is readonly. */
4407 TREE_READONLY (t) = 0;
4408 break;
4409
4410 case ADDR_EXPR:
4411 if (node)
4412 recompute_tree_invariant_for_addr_expr (t);
4413 break;
4414
4415 default:
4416 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4417 && node && !TYPE_P (node)
4418 && TREE_CONSTANT (node))
4419 TREE_CONSTANT (t) = 1;
4420 if (TREE_CODE_CLASS (code) == tcc_reference
4421 && node && TREE_THIS_VOLATILE (node))
4422 TREE_THIS_VOLATILE (t) = 1;
4423 break;
4424 }
4425
4426 return t;
4427 }
4428
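/* Helper for the buildN functions below: store operand N of T and update the
   local SIDE_EFFECTS, READ_ONLY and CONSTANT flags of the enclosing function
   according to the argument's TREE_SIDE_EFFECTS, TREE_READONLY and
   TREE_CONSTANT bits.  */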
4429 #define PROCESS_ARG(N) \
4430 do { \
4431 TREE_OPERAND (t, N) = arg##N; \
4432 if (arg##N &&!TYPE_P (arg##N)) \
4433 { \
4434 if (TREE_SIDE_EFFECTS (arg##N)) \
4435 side_effects = 1; \
4436 if (!TREE_READONLY (arg##N) \
4437 && !CONSTANT_CLASS_P (arg##N)) \
4438 (void) (read_only = 0); \
4439 if (!TREE_CONSTANT (arg##N)) \
4440 (void) (constant = 0); \
4441 } \
4442 } while (0)
4443
4444 tree
4445 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4446 {
4447 bool constant, read_only, side_effects;
4448 tree t;
4449
4450 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4451
4452 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4453 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4454 /* When sizetype precision doesn't match that of pointers
4455 we need to be able to build explicit extensions or truncations
4456 of the offset argument. */
4457 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4458 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4459 && TREE_CODE (arg1) == INTEGER_CST);
4460
4461 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4462 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4463 && ptrofftype_p (TREE_TYPE (arg1)));
4464
4465 t = make_node_stat (code PASS_MEM_STAT);
4466 TREE_TYPE (t) = tt;
4467
4468 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4469 result based on those same flags for the arguments. But if the
4470 arguments aren't really even `tree' expressions, we shouldn't be trying
4471 to do this. */
4472
4473 /* Expressions without side effects may be constant if their
4474 arguments are as well. */
4475 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4476 || TREE_CODE_CLASS (code) == tcc_binary);
4477 read_only = 1;
4478 side_effects = TREE_SIDE_EFFECTS (t);
4479
4480 PROCESS_ARG (0);
4481 PROCESS_ARG (1);
4482
4483 TREE_SIDE_EFFECTS (t) = side_effects;
4484 if (code == MEM_REF)
4485 {
4486 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4487 {
4488 tree o = TREE_OPERAND (arg0, 0);
4489 TREE_READONLY (t) = TREE_READONLY (o);
4490 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4491 }
4492 }
4493 else
4494 {
4495 TREE_READONLY (t) = read_only;
4496 TREE_CONSTANT (t) = constant;
4497 TREE_THIS_VOLATILE (t)
4498 = (TREE_CODE_CLASS (code) == tcc_reference
4499 && arg0 && TREE_THIS_VOLATILE (arg0));
4500 }
4501
4502 return t;
4503 }
4504
4505
4506 tree
4507 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4508 tree arg2 MEM_STAT_DECL)
4509 {
4510 bool constant, read_only, side_effects;
4511 tree t;
4512
4513 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4514 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4515
4516 t = make_node_stat (code PASS_MEM_STAT);
4517 TREE_TYPE (t) = tt;
4518
4519 read_only = 1;
4520
4521 /* As a special exception, if COND_EXPR has NULL branches, we
4522 assume that it is a gimple statement and always consider
4523 it to have side effects. */
4524 if (code == COND_EXPR
4525 && tt == void_type_node
4526 && arg1 == NULL_TREE
4527 && arg2 == NULL_TREE)
4528 side_effects = true;
4529 else
4530 side_effects = TREE_SIDE_EFFECTS (t);
4531
4532 PROCESS_ARG (0);
4533 PROCESS_ARG (1);
4534 PROCESS_ARG (2);
4535
4536 if (code == COND_EXPR)
4537 TREE_READONLY (t) = read_only;
4538
4539 TREE_SIDE_EFFECTS (t) = side_effects;
4540 TREE_THIS_VOLATILE (t)
4541 = (TREE_CODE_CLASS (code) == tcc_reference
4542 && arg0 && TREE_THIS_VOLATILE (arg0));
4543
4544 return t;
4545 }
4546
4547 tree
4548 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4549 tree arg2, tree arg3 MEM_STAT_DECL)
4550 {
4551 bool constant, read_only, side_effects;
4552 tree t;
4553
4554 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4555
4556 t = make_node_stat (code PASS_MEM_STAT);
4557 TREE_TYPE (t) = tt;
4558
4559 side_effects = TREE_SIDE_EFFECTS (t);
4560
4561 PROCESS_ARG (0);
4562 PROCESS_ARG (1);
4563 PROCESS_ARG (2);
4564 PROCESS_ARG (3);
4565
4566 TREE_SIDE_EFFECTS (t) = side_effects;
4567 TREE_THIS_VOLATILE (t)
4568 = (TREE_CODE_CLASS (code) == tcc_reference
4569 && arg0 && TREE_THIS_VOLATILE (arg0));
4570
4571 return t;
4572 }
4573
4574 tree
4575 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4576 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4577 {
4578 bool constant, read_only, side_effects;
4579 tree t;
4580
4581 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4582
4583 t = make_node_stat (code PASS_MEM_STAT);
4584 TREE_TYPE (t) = tt;
4585
4586 side_effects = TREE_SIDE_EFFECTS (t);
4587
4588 PROCESS_ARG (0);
4589 PROCESS_ARG (1);
4590 PROCESS_ARG (2);
4591 PROCESS_ARG (3);
4592 PROCESS_ARG (4);
4593
4594 TREE_SIDE_EFFECTS (t) = side_effects;
4595 if (code == TARGET_MEM_REF)
4596 {
4597 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4598 {
4599 tree o = TREE_OPERAND (arg0, 0);
4600 TREE_READONLY (t) = TREE_READONLY (o);
4601 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4602 }
4603 }
4604 else
4605 TREE_THIS_VOLATILE (t)
4606 = (TREE_CODE_CLASS (code) == tcc_reference
4607 && arg0 && TREE_THIS_VOLATILE (arg0));
4608
4609 return t;
4610 }
4611
4612 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4613 on the pointer PTR. */
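/* For example (a sketch; PTR is a hypothetical pointer-valued tree):

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields a MEM_REF of the pointed-to type, the usual GIMPLE-era replacement
   for a plain *ptr dereference.  */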
4614
4615 tree
4616 build_simple_mem_ref_loc (location_t loc, tree ptr)
4617 {
4618 HOST_WIDE_INT offset = 0;
4619 tree ptype = TREE_TYPE (ptr);
4620 tree tem;
4621 /* For convenience allow addresses that collapse to a simple base
4622 and offset. */
4623 if (TREE_CODE (ptr) == ADDR_EXPR
4624 && (handled_component_p (TREE_OPERAND (ptr, 0))
4625 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4626 {
4627 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4628 gcc_assert (ptr);
4629 ptr = build_fold_addr_expr (ptr);
4630 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4631 }
4632 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4633 ptr, build_int_cst (ptype, offset));
4634 SET_EXPR_LOCATION (tem, loc);
4635 return tem;
4636 }
4637
4638 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4639
4640 offset_int
4641 mem_ref_offset (const_tree t)
4642 {
4643 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4644 }
4645
4646 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4647 offset by OFFSET units. */
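/* E.g. (illustrative; BASE is a hypothetical static VAR_DECL):

     tree addr = build_invariant_address (build_pointer_type (char_type_node),
					  base, 4);

   builds an ADDR_EXPR of a MEM_REF whose base is &BASE and whose constant
   offset is 4 units, with TREE_CONSTANT recomputed by
   recompute_tree_invariant_for_addr_expr.  */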
4648
4649 tree
4650 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4651 {
4652 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4653 build_fold_addr_expr (base),
4654 build_int_cst (ptr_type_node, offset));
4655 tree addr = build1 (ADDR_EXPR, type, ref);
4656 recompute_tree_invariant_for_addr_expr (addr);
4657 return addr;
4658 }
4659
4660 /* Similar to the buildN functions above, except that the TREE_TYPE is
4661 not specified and TREE_SIDE_EFFECTS is left as 0.
4662 It is permissible for arguments to be null,
4663 or even garbage if their values do not matter. */
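/* For example (a sketch; OP is a hypothetical operand tree):

     tree t = build_nt (NEGATE_EXPR, op);

   creates a NEGATE_EXPR with a NULL TREE_TYPE and no flag propagation, which
   is why this is used for template-like trees such as the ones built by
   stabilize_reference above.  */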
4664
4665 tree
4666 build_nt (enum tree_code code, ...)
4667 {
4668 tree t;
4669 int length;
4670 int i;
4671 va_list p;
4672
4673 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4674
4675 va_start (p, code);
4676
4677 t = make_node (code);
4678 length = TREE_CODE_LENGTH (code);
4679
4680 for (i = 0; i < length; i++)
4681 TREE_OPERAND (t, i) = va_arg (p, tree);
4682
4683 va_end (p);
4684 return t;
4685 }
4686
4687 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4688 tree vec. */
4689
4690 tree
4691 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4692 {
4693 tree ret, t;
4694 unsigned int ix;
4695
4696 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4697 CALL_EXPR_FN (ret) = fn;
4698 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4699 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4700 CALL_EXPR_ARG (ret, ix) = t;
4701 return ret;
4702 }
4703 \f
4704 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4705 We do NOT enter this node in any sort of symbol table.
4706
4707 LOC is the location of the decl.
4708
4709 layout_decl is used to set up the decl's storage layout.
4710 Other slots are initialized to 0 or null pointers. */
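/* E.g. (illustrative; the name and type are arbitrary):

     tree var = build_decl (input_location, VAR_DECL,
			    get_identifier ("tmp"), integer_type_node);

   creates a VAR_DECL named "tmp" of type int whose storage layout has
   already been computed by layout_decl.  */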
4711
4712 tree
4713 build_decl_stat (location_t loc, enum tree_code code, tree name,
4714 tree type MEM_STAT_DECL)
4715 {
4716 tree t;
4717
4718 t = make_node_stat (code PASS_MEM_STAT);
4719 DECL_SOURCE_LOCATION (t) = loc;
4720
4721 /* if (type == error_mark_node)
4722 type = integer_type_node; */
4723 /* That is not done, deliberately, so that having error_mark_node
4724 as the type can suppress useless errors in the use of this variable. */
4725
4726 DECL_NAME (t) = name;
4727 TREE_TYPE (t) = type;
4728
4729 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4730 layout_decl (t, 0);
4731
4732 return t;
4733 }
4734
4735 /* Builds and returns a function declaration with name NAME and type TYPE. */
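/* A sketch of a typical call ("__my_runtime_hook" is just a placeholder
   name):

     tree ftype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__my_runtime_hook", ftype);

   gives an external, public, artificial, nothrow FUNCTION_DECL, the usual
   shape for declaring a runtime support routine.  */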
4736
4737 tree
4738 build_fn_decl (const char *name, tree type)
4739 {
4740 tree id = get_identifier (name);
4741 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4742
4743 DECL_EXTERNAL (decl) = 1;
4744 TREE_PUBLIC (decl) = 1;
4745 DECL_ARTIFICIAL (decl) = 1;
4746 TREE_NOTHROW (decl) = 1;
4747
4748 return decl;
4749 }
4750
4751 vec<tree, va_gc> *all_translation_units;
4752
4753 /* Builds a new translation-unit decl with name NAME, queues it in the
4754 global list of translation-unit decls and returns it. */
4755
4756 tree
4757 build_translation_unit_decl (tree name)
4758 {
4759 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4760 name, NULL_TREE);
4761 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4762 vec_safe_push (all_translation_units, tu);
4763 return tu;
4764 }
4765
4766 \f
4767 /* BLOCK nodes are used to represent the structure of binding contours
4768 and declarations, once those contours have been exited and their contents
4769 compiled. This information is used for outputting debugging info. */
4770
4771 tree
4772 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4773 {
4774 tree block = make_node (BLOCK);
4775
4776 BLOCK_VARS (block) = vars;
4777 BLOCK_SUBBLOCKS (block) = subblocks;
4778 BLOCK_SUPERCONTEXT (block) = supercontext;
4779 BLOCK_CHAIN (block) = chain;
4780 return block;
4781 }
4782
4783 \f
4784 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4785
4786 LOC is the location to use in tree T. */
4787
4788 void
4789 protected_set_expr_location (tree t, location_t loc)
4790 {
4791 if (CAN_HAVE_LOCATION_P (t))
4792 SET_EXPR_LOCATION (t, loc);
4793 }
4794 \f
4795 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4796 is ATTRIBUTE. */
4797
4798 tree
4799 build_decl_attribute_variant (tree ddecl, tree attribute)
4800 {
4801 DECL_ATTRIBUTES (ddecl) = attribute;
4802 return ddecl;
4803 }
4804
4805 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4806 is ATTRIBUTE and its qualifiers are QUALS.
4807
4808 Record such modified types already made so we don't make duplicates. */
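/* A sketch of a typical call (the attribute name is hypothetical and assumed
   to be registered in the attribute tables):

     tree attrs = tree_cons (get_identifier ("my_attr"), NULL_TREE,
			     TYPE_ATTRIBUTES (type));
     type = build_type_attribute_qual_variant (type, attrs,
						TYPE_QUALS (type));

   Note that for tagged types that are already defined the attribute is
   dropped with a -Wattributes warning, as done below.  */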
4809
4810 tree
4811 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4812 {
4813 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4814 {
4815 inchash::hash hstate;
4816 tree ntype;
4817 int i;
4818 tree t;
4819 enum tree_code code = TREE_CODE (ttype);
4820
4821 /* Building a distinct copy of a tagged type is inappropriate; it
4822 causes breakage in code that expects there to be a one-to-one
4823 relationship between a struct and its fields.
4824 build_duplicate_type is another solution (as used in
4825 handle_transparent_union_attribute), but that doesn't play well
4826 with the stronger C++ type identity model. */
4827 if (TREE_CODE (ttype) == RECORD_TYPE
4828 || TREE_CODE (ttype) == UNION_TYPE
4829 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4830 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4831 {
4832 warning (OPT_Wattributes,
4833 "ignoring attributes applied to %qT after definition",
4834 TYPE_MAIN_VARIANT (ttype));
4835 return build_qualified_type (ttype, quals);
4836 }
4837
4838 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4839 ntype = build_distinct_type_copy (ttype);
4840
4841 TYPE_ATTRIBUTES (ntype) = attribute;
4842
4843 hstate.add_int (code);
4844 if (TREE_TYPE (ntype))
4845 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4846 attribute_hash_list (attribute, hstate);
4847
4848 switch (TREE_CODE (ntype))
4849 {
4850 case FUNCTION_TYPE:
4851 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4852 break;
4853 case ARRAY_TYPE:
4854 if (TYPE_DOMAIN (ntype))
4855 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4856 break;
4857 case INTEGER_TYPE:
4858 t = TYPE_MAX_VALUE (ntype);
4859 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4860 hstate.add_object (TREE_INT_CST_ELT (t, i));
4861 break;
4862 case REAL_TYPE:
4863 case FIXED_POINT_TYPE:
4864 {
4865 unsigned int precision = TYPE_PRECISION (ntype);
4866 hstate.add_object (precision);
4867 }
4868 break;
4869 default:
4870 break;
4871 }
4872
4873 ntype = type_hash_canon (hstate.end(), ntype);
4874
4875 /* If the target-dependent attributes make NTYPE different from
4876 its canonical type, we will need to use structural equality
4877 checks for this type. */
4878 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4879 || !comp_type_attributes (ntype, ttype))
4880 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4881 else if (TYPE_CANONICAL (ntype) == ntype)
4882 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4883
4884 ttype = build_qualified_type (ntype, quals);
4885 }
4886 else if (TYPE_QUALS (ttype) != quals)
4887 ttype = build_qualified_type (ttype, quals);
4888
4889 return ttype;
4890 }
4891
4892 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4893 the same. */
4894
4895 static bool
4896 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4897 {
4898 tree cl1, cl2;
4899 for (cl1 = clauses1, cl2 = clauses2;
4900 cl1 && cl2;
4901 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4902 {
4903 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4904 return false;
4905 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4906 {
4907 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4908 OMP_CLAUSE_DECL (cl2)) != 1)
4909 return false;
4910 }
4911 switch (OMP_CLAUSE_CODE (cl1))
4912 {
4913 case OMP_CLAUSE_ALIGNED:
4914 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4915 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4916 return false;
4917 break;
4918 case OMP_CLAUSE_LINEAR:
4919 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4920 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4921 return false;
4922 break;
4923 case OMP_CLAUSE_SIMDLEN:
4924 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4925 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4926 return false;
4927 default:
4928 break;
4929 }
4930 }
4931 return true;
4932 }
4933
4934 /* Compare two constructor-element-type constants. Return true if the lists
4935 are known to be equal; otherwise return false. */
4936
4937 static bool
4938 simple_cst_list_equal (const_tree l1, const_tree l2)
4939 {
4940 while (l1 != NULL_TREE && l2 != NULL_TREE)
4941 {
4942 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4943 return false;
4944
4945 l1 = TREE_CHAIN (l1);
4946 l2 = TREE_CHAIN (l2);
4947 }
4948
4949 return l1 == l2;
4950 }
4951
4952 /* Compare two identifier nodes representing attributes. Either one may
4953 be in wrapped __ATTR__ form. Return true if they are the same, false
4954 otherwise. */
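/* So, for example, the identifiers "format" and "__format__" compare equal
   here, while "format" and "__printf__" do not.  */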
4955
4956 static bool
4957 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4958 {
4959 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4960 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4961 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4962
4963 /* Identifiers can be compared directly for equality. */
4964 if (attr1 == attr2)
4965 return true;
4966
4967 /* If they are not equal, they may still be the same attribute, with
4968 one in the form 'text' and the other in the form '__text__'. TODO:
4969 If we were storing attributes in normalized 'text' form, then
4970 this could all go away and we could take full advantage of
4971 the fact that we're comparing identifiers. :-) */
4972 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4973 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4974
4975 if (attr2_len == attr1_len + 4)
4976 {
4977 const char *p = IDENTIFIER_POINTER (attr2);
4978 const char *q = IDENTIFIER_POINTER (attr1);
4979 if (p[0] == '_' && p[1] == '_'
4980 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4981 && strncmp (q, p + 2, attr1_len) == 0)
4982 return true;
4983 }
4984 else if (attr2_len + 4 == attr1_len)
4985 {
4986 const char *p = IDENTIFIER_POINTER (attr2);
4987 const char *q = IDENTIFIER_POINTER (attr1);
4988 if (q[0] == '_' && q[1] == '_'
4989 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4990 && strncmp (q + 2, p, attr2_len) == 0)
4991 return true;
4992 }
4993
4994 return false;
4995 }
4996
4997 /* Compare two attributes for their value identity. Return true if the
4998 attribute values are known to be equal; otherwise return false. */
4999
5000 bool
5001 attribute_value_equal (const_tree attr1, const_tree attr2)
5002 {
5003 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5004 return true;
5005
5006 if (TREE_VALUE (attr1) != NULL_TREE
5007 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5008 && TREE_VALUE (attr2) != NULL_TREE
5009 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5010 {
5011 /* Handle attribute format. */
5012 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
5013 {
5014 attr1 = TREE_VALUE (attr1);
5015 attr2 = TREE_VALUE (attr2);
5016 /* Compare the archetypes (printf/scanf/strftime/...). */
5017 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5018 TREE_VALUE (attr2)))
5019 return false;
5020 /* Archetypes are the same. Compare the rest. */
5021 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5022 TREE_CHAIN (attr2)) == 1);
5023 }
5024 return (simple_cst_list_equal (TREE_VALUE (attr1),
5025 TREE_VALUE (attr2)) == 1);
5026 }
5027
5028 if ((flag_openmp || flag_openmp_simd)
5029 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5030 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5031 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5032 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5033 TREE_VALUE (attr2));
5034
5035 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5036 }
5037
5038 /* Return 0 if the attributes for two types are incompatible, 1 if they
5039 are compatible, and 2 if they are nearly compatible (which causes a
5040 warning to be generated). */
5041 int
5042 comp_type_attributes (const_tree type1, const_tree type2)
5043 {
5044 const_tree a1 = TYPE_ATTRIBUTES (type1);
5045 const_tree a2 = TYPE_ATTRIBUTES (type2);
5046 const_tree a;
5047
5048 if (a1 == a2)
5049 return 1;
5050 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5051 {
5052 const struct attribute_spec *as;
5053 const_tree attr;
5054
5055 as = lookup_attribute_spec (get_attribute_name (a));
5056 if (!as || as->affects_type_identity == false)
5057 continue;
5058
5059 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5060 if (!attr || !attribute_value_equal (a, attr))
5061 break;
5062 }
5063 if (!a)
5064 {
5065 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5066 {
5067 const struct attribute_spec *as;
5068
5069 as = lookup_attribute_spec (get_attribute_name (a));
5070 if (!as || as->affects_type_identity == false)
5071 continue;
5072
5073 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5074 break;
5075 /* We don't need to compare trees again, as we did this
5076 already in the first loop. */
5077 }
5078 /* All identity-affecting attributes are equal, so
5079 there is no need to call the target hook for comparison. */
5080 if (!a)
5081 return 1;
5082 }
5083 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5084 return 0;
5085 /* As some type combinations - like default calling-convention - might
5086 be compatible, we have to call the target hook to get the final result. */
5087 return targetm.comp_type_attributes (type1, type2);
5088 }
5089
5090 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
5091 is ATTRIBUTE.
5092
5093 Record such modified types already made so we don't make duplicates. */
5094
5095 tree
5096 build_type_attribute_variant (tree ttype, tree attribute)
5097 {
5098 return build_type_attribute_qual_variant (ttype, attribute,
5099 TYPE_QUALS (ttype));
5100 }
5101
5102
5103 /* Reset the expression *EXPR_P, a size or position.
5104
5105 ??? We could reset all non-constant sizes or positions. But it's cheap
5106 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5107
5108 We need to reset self-referential sizes or positions because they cannot
5109 be gimplified and thus can contain a CALL_EXPR after the gimplification
5110 is finished, which will run afoul of LTO streaming. And they need to be
5111 reset to something essentially dummy but not constant, so as to preserve
5112 the properties of the object they are attached to. */
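/* For instance, a DECL_SIZE containing a PLACEHOLDER_EXPR (as happens for
   self-referential record sizes) is replaced by a bare PLACEHOLDER_EXPR of
   the same type, which keeps the size non-constant without dragging the
   original expression through LTO streaming.  */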
5113
5114 static inline void
5115 free_lang_data_in_one_sizepos (tree *expr_p)
5116 {
5117 tree expr = *expr_p;
5118 if (CONTAINS_PLACEHOLDER_P (expr))
5119 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5120 }
5121
5122
5123 /* Reset all the fields in a binfo node BINFO. We only keep
5124 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5125
5126 static void
5127 free_lang_data_in_binfo (tree binfo)
5128 {
5129 unsigned i;
5130 tree t;
5131
5132 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5133
5134 BINFO_VIRTUALS (binfo) = NULL_TREE;
5135 BINFO_BASE_ACCESSES (binfo) = NULL;
5136 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5137 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5138
5139 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5140 free_lang_data_in_binfo (t);
5141 }
5142
5143
5144 /* Reset all language specific information still present in TYPE. */
5145
5146 static void
5147 free_lang_data_in_type (tree type)
5148 {
5149 gcc_assert (TYPE_P (type));
5150
5151 /* Give the FE a chance to remove its own data first. */
5152 lang_hooks.free_lang_data (type);
5153
5154 TREE_LANG_FLAG_0 (type) = 0;
5155 TREE_LANG_FLAG_1 (type) = 0;
5156 TREE_LANG_FLAG_2 (type) = 0;
5157 TREE_LANG_FLAG_3 (type) = 0;
5158 TREE_LANG_FLAG_4 (type) = 0;
5159 TREE_LANG_FLAG_5 (type) = 0;
5160 TREE_LANG_FLAG_6 (type) = 0;
5161
5162 if (TREE_CODE (type) == FUNCTION_TYPE)
5163 {
5164 /* Remove the const and volatile qualifiers from arguments. The
5165 C++ front end removes them, but the C front end does not,
5166 leading to false ODR violation errors when merging two
5167 instances of the same function signature compiled by
5168 different front ends. */
5169 tree p;
5170
5171 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5172 {
5173 tree arg_type = TREE_VALUE (p);
5174
5175 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5176 {
5177 int quals = TYPE_QUALS (arg_type)
5178 & ~TYPE_QUAL_CONST
5179 & ~TYPE_QUAL_VOLATILE;
5180 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5181 free_lang_data_in_type (TREE_VALUE (p));
5182 }
5183 /* C++ FE uses TREE_PURPOSE to store initial values. */
5184 TREE_PURPOSE (p) = NULL;
5185 }
5186 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5187 TYPE_MINVAL (type) = NULL;
5188 }
5189 if (TREE_CODE (type) == METHOD_TYPE)
5190 {
5191 tree p;
5192
5193 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5194 {
5195 /* C++ FE uses TREE_PURPOSE to store initial values. */
5196 TREE_PURPOSE (p) = NULL;
5197 }
5198 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5199 TYPE_MINVAL (type) = NULL;
5200 }
5201
5202 /* Remove members that are not actually FIELD_DECLs from the field
5203 list of an aggregate. These occur in C++. */
5204 if (RECORD_OR_UNION_TYPE_P (type))
5205 {
5206 tree prev, member;
5207
5208 /* Note that TYPE_FIELDS can be shared across distinct
5209 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5210 to be removed, we cannot set its TREE_CHAIN to NULL.
5211 Otherwise, we would not be able to find all the other fields
5212 in the other instances of this TREE_TYPE.
5213
5214 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5215 prev = NULL_TREE;
5216 member = TYPE_FIELDS (type);
5217 while (member)
5218 {
5219 if (TREE_CODE (member) == FIELD_DECL
5220 || (TREE_CODE (member) == TYPE_DECL
5221 && !DECL_IGNORED_P (member)
5222 && debug_info_level > DINFO_LEVEL_TERSE
5223 && !is_redundant_typedef (member)))
5224 {
5225 if (prev)
5226 TREE_CHAIN (prev) = member;
5227 else
5228 TYPE_FIELDS (type) = member;
5229 prev = member;
5230 }
5231
5232 member = TREE_CHAIN (member);
5233 }
5234
5235 if (prev)
5236 TREE_CHAIN (prev) = NULL_TREE;
5237 else
5238 TYPE_FIELDS (type) = NULL_TREE;
5239
5240 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5241 and leave the pointer dangling from time to time. */
5242 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5243 TYPE_VFIELD (type) = NULL_TREE;
5244
5245 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5246 to enable ODR warnings about different method lists, doing so
5247 seems to increase the size of the streamed LTO data impractically.
5248 Keep the information of whether TYPE_METHODS was non-NULL; this is
5249 used by function.c and the pretty printers. */
5250 if (TYPE_METHODS (type))
5251 TYPE_METHODS (type) = error_mark_node;
5252 if (TYPE_BINFO (type))
5253 {
5254 free_lang_data_in_binfo (TYPE_BINFO (type));
5255 /* We need to preserve the link to bases and the virtual table for all
5256 polymorphic types to keep the devirtualization machinery working.
5257 Debug output cares only about bases, but we also output the
5258 virtual table pointers so that merging -fdevirtualize and
5259 -fno-devirtualize units is easier. */
5260 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5261 || !flag_devirtualize)
5262 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5263 && !BINFO_VTABLE (TYPE_BINFO (type)))
5264 || debug_info_level != DINFO_LEVEL_NONE))
5265 TYPE_BINFO (type) = NULL;
5266 }
5267 }
5268 else
5269 {
5270 /* For non-aggregate types, clear out the language slot (which
5271 overloads TYPE_BINFO). */
5272 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5273
5274 if (INTEGRAL_TYPE_P (type)
5275 || SCALAR_FLOAT_TYPE_P (type)
5276 || FIXED_POINT_TYPE_P (type))
5277 {
5278 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5279 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5280 }
5281 }
5282
5283 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5284 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5285
5286 if (TYPE_CONTEXT (type)
5287 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5288 {
5289 tree ctx = TYPE_CONTEXT (type);
5290 do
5291 {
5292 ctx = BLOCK_SUPERCONTEXT (ctx);
5293 }
5294 while (ctx && TREE_CODE (ctx) == BLOCK);
5295 TYPE_CONTEXT (type) = ctx;
5296 }
5297 }
5298
5299
5300 /* Return true if DECL may need an assembler name to be set. */
5301
5302 static inline bool
5303 need_assembler_name_p (tree decl)
5304 {
5305 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5306 Rule merging. This makes type_odr_p return true on those types during
5307 LTO, and by comparing the mangled names we can tell which types are
5308 intended to be equivalent across compilation units.
5309
5310 We do not store names of types in anonymous namespaces (type_in_anonymous_namespace_p).
5311
5312 Record, union and enumeration types have linkage that allows us
5313 to check type_in_anonymous_namespace_p. We do not mangle compound types
5314 that can always be compared structurally.
5315
5316 Similarly for builtin types, we compare properties of their main variant.
5317 Integer types are a special case, since mangling does distinguish
5318 between char/signed char/unsigned char etc. Storing names for these lets
5319 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5320 See cp/mangle.c:write_builtin_type for details. */
5321
5322 if (flag_lto_odr_type_mering
5323 && TREE_CODE (decl) == TYPE_DECL
5324 && DECL_NAME (decl)
5325 && decl == TYPE_NAME (TREE_TYPE (decl))
5326 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5327 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5328 && (type_with_linkage_p (TREE_TYPE (decl))
5329 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5330 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5331 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5332 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5333 if (TREE_CODE (decl) != FUNCTION_DECL
5334 && TREE_CODE (decl) != VAR_DECL)
5335 return false;
5336
5337 /* If DECL already has its assembler name set, it does not need a
5338 new one. */
5339 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5340 || DECL_ASSEMBLER_NAME_SET_P (decl))
5341 return false;
5342
5343 /* Abstract decls do not need an assembler name. */
5344 if (DECL_ABSTRACT_P (decl))
5345 return false;
5346
5347 /* For VAR_DECLs, only static, public and external symbols need an
5348 assembler name. */
5349 if (TREE_CODE (decl) == VAR_DECL
5350 && !TREE_STATIC (decl)
5351 && !TREE_PUBLIC (decl)
5352 && !DECL_EXTERNAL (decl))
5353 return false;
5354
5355 if (TREE_CODE (decl) == FUNCTION_DECL)
5356 {
5357 /* Do not set assembler name on builtins. Allow RTL expansion to
5358 decide whether to expand inline or via a regular call. */
5359 if (DECL_BUILT_IN (decl)
5360 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5361 return false;
5362
5363 /* Functions represented in the callgraph need an assembler name. */
5364 if (cgraph_node::get (decl) != NULL)
5365 return true;
5366
5367 /* Unused and non-public functions do not need an assembler name. */
5368 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5369 return false;
5370 }
5371
5372 return true;
5373 }
5374
5375
5376 /* Reset all language specific information still present in symbol
5377 DECL. */
5378
5379 static void
5380 free_lang_data_in_decl (tree decl)
5381 {
5382 gcc_assert (DECL_P (decl));
5383
5384 /* Give the FE a chance to remove its own data first. */
5385 lang_hooks.free_lang_data (decl);
5386
5387 TREE_LANG_FLAG_0 (decl) = 0;
5388 TREE_LANG_FLAG_1 (decl) = 0;
5389 TREE_LANG_FLAG_2 (decl) = 0;
5390 TREE_LANG_FLAG_3 (decl) = 0;
5391 TREE_LANG_FLAG_4 (decl) = 0;
5392 TREE_LANG_FLAG_5 (decl) = 0;
5393 TREE_LANG_FLAG_6 (decl) = 0;
5394
5395 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5396 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5397 if (TREE_CODE (decl) == FIELD_DECL)
5398 {
5399 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5400 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5401 DECL_QUALIFIER (decl) = NULL_TREE;
5402 }
5403
5404 if (TREE_CODE (decl) == FUNCTION_DECL)
5405 {
5406 struct cgraph_node *node;
5407 if (!(node = cgraph_node::get (decl))
5408 || (!node->definition && !node->clones))
5409 {
5410 if (node)
5411 node->release_body ();
5412 else
5413 {
5414 release_function_body (decl);
5415 DECL_ARGUMENTS (decl) = NULL;
5416 DECL_RESULT (decl) = NULL;
5417 DECL_INITIAL (decl) = error_mark_node;
5418 }
5419 }
5420 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5421 {
5422 tree t;
5423
5424 /* If DECL has a gimple body, then the context for its
5425 arguments must be DECL. Otherwise, it doesn't really
5426 matter, as we will not be emitting any code for DECL. In
5427 general, there may be other instances of DECL created by
5428 the front end and since PARM_DECLs are generally shared,
5429 their DECL_CONTEXT changes as the replicas of DECL are
5430 created. The only time where DECL_CONTEXT is important
5431 is for the FUNCTION_DECLs that have a gimple body (since
5432 the PARM_DECL will be used in the function's body). */
5433 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5434 DECL_CONTEXT (t) = decl;
5435 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5436 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5437 = target_option_default_node;
5438 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5439 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5440 = optimization_default_node;
5441 }
5442
5443 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5444 At this point, it is not needed anymore. */
5445 DECL_SAVED_TREE (decl) = NULL_TREE;
5446
5447 /* Clear the abstract origin if it refers to a method. Otherwise
5448 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5449 origin will not be output correctly. */
5450 if (DECL_ABSTRACT_ORIGIN (decl)
5451 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5452 && RECORD_OR_UNION_TYPE_P
5453 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5454 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5455
5456 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5457 DECL_VINDEX referring to itself into a vtable slot number as it
5458 should. Happens with functions that are copied and then forgotten
5459 about. Just clear it, it won't matter anymore. */
5460 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5461 DECL_VINDEX (decl) = NULL_TREE;
5462 }
5463 else if (TREE_CODE (decl) == VAR_DECL)
5464 {
5465 if ((DECL_EXTERNAL (decl)
5466 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5467 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5468 DECL_INITIAL (decl) = NULL_TREE;
5469 }
5470 else if (TREE_CODE (decl) == TYPE_DECL)
5471 {
5472 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5473 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5474 DECL_INITIAL (decl) = NULL_TREE;
5475 }
5476 else if (TREE_CODE (decl) == FIELD_DECL)
5477 DECL_INITIAL (decl) = NULL_TREE;
5478 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5479 && DECL_INITIAL (decl)
5480 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5481 {
5482 /* Strip builtins from the translation-unit BLOCK. We still have targets
5483 without builtin_decl_explicit support, and builtins are shared
5484 nodes, so we cannot use their TREE_CHAIN in multiple lists. */
5485 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5486 while (*nextp)
5487 {
5488 tree var = *nextp;
5489 if (TREE_CODE (var) == FUNCTION_DECL
5490 && DECL_BUILT_IN (var))
5491 *nextp = TREE_CHAIN (var);
5492 else
5493 nextp = &TREE_CHAIN (var);
5494 }
5495 }
5496 }
5497
5498
5499 /* Data used when collecting DECLs and TYPEs for language data removal. */
5500
5501 struct free_lang_data_d
5502 {
5503 /* Worklist to avoid excessive recursion. */
5504 vec<tree> worklist;
5505
5506 /* Set of traversed objects. Used to avoid duplicate visits. */
5507 hash_set<tree> *pset;
5508
5509 /* Array of symbols to process with free_lang_data_in_decl. */
5510 vec<tree> decls;
5511
5512 /* Array of types to process with free_lang_data_in_type. */
5513 vec<tree> types;
5514 };
5515
5516
5517 /* Save all language fields needed to generate proper debug information
5518 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5519
5520 static void
5521 save_debug_info_for_decl (tree t)
5522 {
5523 /*struct saved_debug_info_d *sdi;*/
5524
5525 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5526
5527 /* FIXME. Partial implementation for saving debug info removed. */
5528 }
5529
5530
5531 /* Save all language fields needed to generate proper debug information
5532 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5533
5534 static void
5535 save_debug_info_for_type (tree t)
5536 {
5537 /*struct saved_debug_info_d *sdi;*/
5538
5539 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5540
5541 /* FIXME. Partial implementation for saving debug info removed. */
5542 }
5543
5544
5545 /* Add type or decl T to one of the list of tree nodes that need their
5546 language data removed. The lists are held inside FLD. */
5547
5548 static void
5549 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5550 {
5551 if (DECL_P (t))
5552 {
5553 fld->decls.safe_push (t);
5554 if (debug_info_level > DINFO_LEVEL_TERSE)
5555 save_debug_info_for_decl (t);
5556 }
5557 else if (TYPE_P (t))
5558 {
5559 fld->types.safe_push (t);
5560 if (debug_info_level > DINFO_LEVEL_TERSE)
5561 save_debug_info_for_type (t);
5562 }
5563 else
5564 gcc_unreachable ();
5565 }
5566
5567 /* Push tree node T into FLD->WORKLIST. */
5568
5569 static inline void
5570 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5571 {
5572 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5573 fld->worklist.safe_push ((t));
5574 }
5575
5576
5577 /* Operand callback helper for free_lang_data_in_node. *TP is the
5578 subtree operand being considered. */
5579
5580 static tree
5581 find_decls_types_r (tree *tp, int *ws, void *data)
5582 {
5583 tree t = *tp;
5584 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5585
5586 if (TREE_CODE (t) == TREE_LIST)
5587 return NULL_TREE;
5588
5589 /* Language specific nodes will be removed, so there is no need
5590 to gather anything under them. */
5591 if (is_lang_specific (t))
5592 {
5593 *ws = 0;
5594 return NULL_TREE;
5595 }
5596
5597 if (DECL_P (t))
5598 {
5599 /* Note that walk_tree does not traverse every possible field in
5600 decls, so we have to do our own traversals here. */
5601 add_tree_to_fld_list (t, fld);
5602
5603 fld_worklist_push (DECL_NAME (t), fld);
5604 fld_worklist_push (DECL_CONTEXT (t), fld);
5605 fld_worklist_push (DECL_SIZE (t), fld);
5606 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5607
5608 /* We are going to remove everything under DECL_INITIAL for
5609 TYPE_DECLs. No point walking them. */
5610 if (TREE_CODE (t) != TYPE_DECL)
5611 fld_worklist_push (DECL_INITIAL (t), fld);
5612
5613 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5614 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5615
5616 if (TREE_CODE (t) == FUNCTION_DECL)
5617 {
5618 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5619 fld_worklist_push (DECL_RESULT (t), fld);
5620 }
5621 else if (TREE_CODE (t) == TYPE_DECL)
5622 {
5623 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5624 }
5625 else if (TREE_CODE (t) == FIELD_DECL)
5626 {
5627 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5628 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5629 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5630 fld_worklist_push (DECL_FCONTEXT (t), fld);
5631 }
5632
5633 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5634 && DECL_HAS_VALUE_EXPR_P (t))
5635 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5636
5637 if (TREE_CODE (t) != FIELD_DECL
5638 && TREE_CODE (t) != TYPE_DECL)
5639 fld_worklist_push (TREE_CHAIN (t), fld);
5640 *ws = 0;
5641 }
5642 else if (TYPE_P (t))
5643 {
5644 /* Note that walk_tree does not traverse every possible field in
5645 types, so we have to do our own traversals here. */
5646 add_tree_to_fld_list (t, fld);
5647
5648 if (!RECORD_OR_UNION_TYPE_P (t))
5649 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5650 fld_worklist_push (TYPE_SIZE (t), fld);
5651 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5652 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5653 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5654 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5655 fld_worklist_push (TYPE_NAME (t), fld);
5656 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5657 them and thus do not want to reach unused pointer types
5658 this way. */
5659 if (!POINTER_TYPE_P (t))
5660 fld_worklist_push (TYPE_MINVAL (t), fld);
5661 if (!RECORD_OR_UNION_TYPE_P (t))
5662 fld_worklist_push (TYPE_MAXVAL (t), fld);
5663 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5664 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5665 do not want to reach unused variants this way. */
5666 if (TYPE_CONTEXT (t))
5667 {
5668 tree ctx = TYPE_CONTEXT (t);
5669 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5670 So push that instead. */
5671 while (ctx && TREE_CODE (ctx) == BLOCK)
5672 ctx = BLOCK_SUPERCONTEXT (ctx);
5673 fld_worklist_push (ctx, fld);
5674 }
5675 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5676 want to reach unused types this way. */
5677
5678 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5679 {
5680 unsigned i;
5681 tree tem;
5682 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5683 fld_worklist_push (TREE_TYPE (tem), fld);
5684 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5685 if (tem
5686 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5687 && TREE_CODE (tem) == TREE_LIST)
5688 do
5689 {
5690 fld_worklist_push (TREE_VALUE (tem), fld);
5691 tem = TREE_CHAIN (tem);
5692 }
5693 while (tem);
5694 }
5695 if (RECORD_OR_UNION_TYPE_P (t))
5696 {
5697 tree tem;
5698 /* Push all TYPE_FIELDS - there can be interleaving interesting
5699 and non-interesting things. */
5700 tem = TYPE_FIELDS (t);
5701 while (tem)
5702 {
5703 if (TREE_CODE (tem) == FIELD_DECL
5704 || (TREE_CODE (tem) == TYPE_DECL
5705 && !DECL_IGNORED_P (tem)
5706 && debug_info_level > DINFO_LEVEL_TERSE
5707 && !is_redundant_typedef (tem)))
5708 fld_worklist_push (tem, fld);
5709 tem = TREE_CHAIN (tem);
5710 }
5711 }
5712
5713 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5714 *ws = 0;
5715 }
5716 else if (TREE_CODE (t) == BLOCK)
5717 {
5718 tree tem;
5719 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5720 fld_worklist_push (tem, fld);
5721 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5722 fld_worklist_push (tem, fld);
5723 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5724 }
5725
5726 if (TREE_CODE (t) != IDENTIFIER_NODE
5727 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5728 fld_worklist_push (TREE_TYPE (t), fld);
5729
5730 return NULL_TREE;
5731 }
5732
5733
5734 /* Find decls and types in T. */
5735
5736 static void
5737 find_decls_types (tree t, struct free_lang_data_d *fld)
5738 {
5739 while (1)
5740 {
5741 if (!fld->pset->contains (t))
5742 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5743 if (fld->worklist.is_empty ())
5744 break;
5745 t = fld->worklist.pop ();
5746 }
5747 }
5748
5749 /* Translate all the types in LIST with the corresponding runtime
5750 types. */
5751
5752 static tree
5753 get_eh_types_for_runtime (tree list)
5754 {
5755 tree head, prev;
5756
5757 if (list == NULL_TREE)
5758 return NULL_TREE;
5759
5760 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5761 prev = head;
5762 list = TREE_CHAIN (list);
5763 while (list)
5764 {
5765 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5766 TREE_CHAIN (prev) = n;
5767 prev = TREE_CHAIN (prev);
5768 list = TREE_CHAIN (list);
5769 }
5770
5771 return head;
5772 }
5773
5774
5775 /* Find decls and types referenced in EH region R and store them in
5776 FLD->DECLS and FLD->TYPES. */
5777
5778 static void
5779 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5780 {
5781 switch (r->type)
5782 {
5783 case ERT_CLEANUP:
5784 break;
5785
5786 case ERT_TRY:
5787 {
5788 eh_catch c;
5789
5790 /* The types referenced in each catch must first be changed to the
5791 EH types used at runtime. This removes references to FE types
5792 in the region. */
5793 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5794 {
5795 c->type_list = get_eh_types_for_runtime (c->type_list);
5796 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5797 }
5798 }
5799 break;
5800
5801 case ERT_ALLOWED_EXCEPTIONS:
5802 r->u.allowed.type_list
5803 = get_eh_types_for_runtime (r->u.allowed.type_list);
5804 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5805 break;
5806
5807 case ERT_MUST_NOT_THROW:
5808 walk_tree (&r->u.must_not_throw.failure_decl,
5809 find_decls_types_r, fld, fld->pset);
5810 break;
5811 }
5812 }
5813
5814
5815 /* Find decls and types referenced in cgraph node N and store them in
5816 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5817 look for *every* kind of DECL and TYPE node reachable from N,
5818 including those embedded inside types and decls (i.e., TYPE_DECLs,
5819 NAMESPACE_DECLs, etc). */
5820
5821 static void
5822 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5823 {
5824 basic_block bb;
5825 struct function *fn;
5826 unsigned ix;
5827 tree t;
5828
5829 find_decls_types (n->decl, fld);
5830
5831 if (!gimple_has_body_p (n->decl))
5832 return;
5833
5834 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5835
5836 fn = DECL_STRUCT_FUNCTION (n->decl);
5837
5838 /* Traverse locals. */
5839 FOR_EACH_LOCAL_DECL (fn, ix, t)
5840 find_decls_types (t, fld);
5841
5842 /* Traverse EH regions in FN. */
5843 {
5844 eh_region r;
5845 FOR_ALL_EH_REGION_FN (r, fn)
5846 find_decls_types_in_eh_region (r, fld);
5847 }
5848
5849 /* Traverse every statement in FN. */
5850 FOR_EACH_BB_FN (bb, fn)
5851 {
5852 gphi_iterator psi;
5853 gimple_stmt_iterator si;
5854 unsigned i;
5855
5856 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5857 {
5858 gphi *phi = psi.phi ();
5859
5860 for (i = 0; i < gimple_phi_num_args (phi); i++)
5861 {
5862 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5863 find_decls_types (*arg_p, fld);
5864 }
5865 }
5866
5867 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5868 {
5869 gimple *stmt = gsi_stmt (si);
5870
5871 if (is_gimple_call (stmt))
5872 find_decls_types (gimple_call_fntype (stmt), fld);
5873
5874 for (i = 0; i < gimple_num_ops (stmt); i++)
5875 {
5876 tree arg = gimple_op (stmt, i);
5877 find_decls_types (arg, fld);
5878 }
5879 }
5880 }
5881 }
5882
5883
5884 /* Find decls and types referenced in varpool node N and store them in
5885 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5886 look for *every* kind of DECL and TYPE node reachable from N,
5887 including those embedded inside types and decls (i.e., TYPE_DECLs,
5888 NAMESPACE_DECLs, etc.). */
5889
5890 static void
5891 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5892 {
5893 find_decls_types (v->decl, fld);
5894 }
5895
5896 /* If T needs an assembler name, have one created for it. */
5897
5898 void
5899 assign_assembler_name_if_neeeded (tree t)
5900 {
5901 if (need_assembler_name_p (t))
5902 {
5903 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5904 diagnostics that use input_location to show locus
5905 information. The problem here is that, at this point,
5906 input_location is generally anchored to the end of the file
5907 (since the parser is long gone), so we don't have a good
5908 position to pin it to.
5909
5910 To alleviate this problem, this uses the location of T's
5911 declaration. Examples of this are
5912 testsuite/g++.dg/template/cond2.C and
5913 testsuite/g++.dg/template/pr35240.C. */
5914 location_t saved_location = input_location;
5915 input_location = DECL_SOURCE_LOCATION (t);
5916
5917 decl_assembler_name (t);
5918
5919 input_location = saved_location;
5920 }
5921 }
5922
5923
5924 /* Free language specific information for every operand and expression
5925 in every node of the call graph. This process operates in three stages:
5926
5927 1- Every callgraph node and varpool node is traversed looking for
5928 decls and types embedded in them. This is a more exhaustive
5929 search than that done by find_referenced_vars, because it will
5930 also collect individual fields, decls embedded in types, etc.
5931
5932 2- All the decls found are sent to free_lang_data_in_decl.
5933
5934 3- All the types found are sent to free_lang_data_in_type.
5935
5936 The ordering between decls and types is important because
5937 free_lang_data_in_decl sets assembler names, which includes
5938 mangling. So types cannot be freed up until assembler names have
5939 been set up. */
5940
5941 static void
5942 free_lang_data_in_cgraph (void)
5943 {
5944 struct cgraph_node *n;
5945 varpool_node *v;
5946 struct free_lang_data_d fld;
5947 tree t;
5948 unsigned i;
5949 alias_pair *p;
5950
5951 /* Initialize sets and arrays to store referenced decls and types. */
5952 fld.pset = new hash_set<tree>;
5953 fld.worklist.create (0);
5954 fld.decls.create (100);
5955 fld.types.create (100);
5956
5957 /* Find decls and types in the body of every function in the callgraph. */
5958 FOR_EACH_FUNCTION (n)
5959 find_decls_types_in_node (n, &fld);
5960
5961 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5962 find_decls_types (p->decl, &fld);
5963
5964 /* Find decls and types in every varpool symbol. */
5965 FOR_EACH_VARIABLE (v)
5966 find_decls_types_in_var (v, &fld);
5967
5968 /* Set the assembler name on every decl found. We need to do this
5969 now because free_lang_data_in_decl will invalidate data needed
5970 for mangling. This breaks mangling on interdependent decls. */
5971 FOR_EACH_VEC_ELT (fld.decls, i, t)
5972 assign_assembler_name_if_neeeded (t);
5973
5974 /* Traverse every decl found freeing its language data. */
5975 FOR_EACH_VEC_ELT (fld.decls, i, t)
5976 free_lang_data_in_decl (t);
5977
5978 /* Traverse every type found freeing its language data. */
5979 FOR_EACH_VEC_ELT (fld.types, i, t)
5980 free_lang_data_in_type (t);
5981 if (flag_checking)
5982 {
5983 FOR_EACH_VEC_ELT (fld.types, i, t)
5984 verify_type (t);
5985 }
5986
5987 delete fld.pset;
5988 fld.worklist.release ();
5989 fld.decls.release ();
5990 fld.types.release ();
5991 }
5992
5993
5994 /* Free resources that are used by the front end but are not needed once it is done. */
5995
5996 static unsigned
5997 free_lang_data (void)
5998 {
5999 unsigned i;
6000
6001 /* If we are the LTO frontend we have freed lang-specific data already. */
6002 if (in_lto_p
6003 || (!flag_generate_lto && !flag_generate_offload))
6004 return 0;
6005
6006 /* Allocate and assign alias sets to the standard integer types
6007 while the slots are still set up the way the front ends generated them. */
6008 for (i = 0; i < itk_none; ++i)
6009 if (integer_types[i])
6010 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6011
6012 /* Traverse the IL resetting language specific information for
6013 operands, expressions, etc. */
6014 free_lang_data_in_cgraph ();
6015
6016 /* Create gimple variants for common types. */
6017 ptrdiff_type_node = integer_type_node;
6018 fileptr_type_node = ptr_type_node;
6019
6020 /* Reset some langhooks. Do not reset types_compatible_p, it may
6021 still be used indirectly via the get_alias_set langhook. */
6022 lang_hooks.dwarf_name = lhd_dwarf_name;
6023 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6024 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6025
6026 /* We do not want the default decl_assembler_name implementation,
6027 rather if we have fixed everything we want a wrapper around it
6028 asserting that all non-local symbols already got their assembler
6029 name and only produce assembler names for local symbols. Or rather
6030 make sure we never call decl_assembler_name on local symbols and
6031 devise a separate, middle-end private scheme for it. */
6032
6033 /* Reset diagnostic machinery. */
6034 tree_diagnostics_defaults (global_dc);
6035
6036 return 0;
6037 }
6038
6039
6040 namespace {
6041
6042 const pass_data pass_data_ipa_free_lang_data =
6043 {
6044 SIMPLE_IPA_PASS, /* type */
6045 "*free_lang_data", /* name */
6046 OPTGROUP_NONE, /* optinfo_flags */
6047 TV_IPA_FREE_LANG_DATA, /* tv_id */
6048 0, /* properties_required */
6049 0, /* properties_provided */
6050 0, /* properties_destroyed */
6051 0, /* todo_flags_start */
6052 0, /* todo_flags_finish */
6053 };
6054
6055 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6056 {
6057 public:
6058 pass_ipa_free_lang_data (gcc::context *ctxt)
6059 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6060 {}
6061
6062 /* opt_pass methods: */
6063 virtual unsigned int execute (function *) { return free_lang_data (); }
6064
6065 }; // class pass_ipa_free_lang_data
6066
6067 } // anon namespace
6068
6069 simple_ipa_opt_pass *
6070 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6071 {
6072 return new pass_ipa_free_lang_data (ctxt);
6073 }
6074
6075 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6076 ATTR_NAME. Also used internally by remove_attribute(). */
6077 bool
6078 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6079 {
6080 size_t ident_len = IDENTIFIER_LENGTH (ident);
6081
6082 if (ident_len == attr_len)
6083 {
6084 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6085 return true;
6086 }
6087 else if (ident_len == attr_len + 4)
6088 {
6089 /* There is the possibility that ATTR is 'text' and IDENT is
6090 '__text__'. */
6091 const char *p = IDENTIFIER_POINTER (ident);
6092 if (p[0] == '_' && p[1] == '_'
6093 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6094 && strncmp (attr_name, p + 2, attr_len) == 0)
6095 return true;
6096 }
6097
6098 return false;
6099 }
6100
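/* Illustrative sketch, not part of the original file: callers normally
   reach this routine through the is_attribute_p wrapper in tree.h, but
   a direct use looks like the hypothetical helper below, which accepts
   both the 'packed' and '__packed__' spellings of the identifier.  */

static bool
is_packed_attr_sketch (const_tree ident)
{
  return private_is_attribute_p ("packed", strlen ("packed"), ident);
}
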
6101 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6102 of ATTR_NAME, and LIST is not NULL_TREE. */
6103 tree
6104 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6105 {
6106 while (list)
6107 {
6108 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6109
6110 if (ident_len == attr_len)
6111 {
6112 if (!strcmp (attr_name,
6113 IDENTIFIER_POINTER (get_attribute_name (list))))
6114 break;
6115 }
6116 /* TODO: If we made sure that attributes were stored in the
6117 canonical form without '__...__' (i.e., as in 'text' as opposed
6118 to '__text__') then we could avoid the following case. */
6119 else if (ident_len == attr_len + 4)
6120 {
6121 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6122 if (p[0] == '_' && p[1] == '_'
6123 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6124 && strncmp (attr_name, p + 2, attr_len) == 0)
6125 break;
6126 }
6127 list = TREE_CHAIN (list);
6128 }
6129
6130 return list;
6131 }
6132
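/* Illustrative sketch, not part of the original file: this routine is
   normally reached through the lookup_attribute wrapper in tree.h.  A
   hypothetical query for the "noinline" attribute on a declaration
   would look like this.  */

static bool
decl_noinline_p_sketch (tree decl)
{
  return lookup_attribute ("noinline", DECL_ATTRIBUTES (decl)) != NULL_TREE;
}
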
6133 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6134 return a pointer to the first list element whose attribute name
6135 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6136 '__text__'). */
6137
6138 tree
6139 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6140 tree list)
6141 {
6142 while (list)
6143 {
6144 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6145
6146 if (attr_len > ident_len)
6147 {
6148 list = TREE_CHAIN (list);
6149 continue;
6150 }
6151
6152 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6153
6154 if (strncmp (attr_name, p, attr_len) == 0)
6155 break;
6156
6157 /* TODO: If we made sure that attributes were stored in the
6158 canonical form without '__...__' (i.e., as in 'text' as opposed
6159 to '__text__') then we could avoid the following case. */
6160 if (p[0] == '_' && p[1] == '_' &&
6161 strncmp (attr_name, p + 2, attr_len) == 0)
6162 break;
6163
6164 list = TREE_CHAIN (list);
6165 }
6166
6167 return list;
6168 }
6169
6170
6171 /* A variant of lookup_attribute() that can be used with an identifier
6172 as the first argument, and where the identifier can be either
6173 'text' or '__text__'.
6174
6175 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6176 return a pointer to the attribute's list element if the attribute
6177 is part of the list, or NULL_TREE if not found. If the attribute
6178 appears more than once, this only returns the first occurrence; the
6179 TREE_CHAIN of the return value should be passed back in if further
6180 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6181 can be in the form 'text' or '__text__'. */
6182 static tree
6183 lookup_ident_attribute (tree attr_identifier, tree list)
6184 {
6185 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6186
6187 while (list)
6188 {
6189 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6190 == IDENTIFIER_NODE);
6191
6192 if (cmp_attrib_identifiers (attr_identifier,
6193 get_attribute_name (list)))
6194 /* Found it. */
6195 break;
6196 list = TREE_CHAIN (list);
6197 }
6198
6199 return list;
6200 }
6201
6202 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6203 modified list. */
6204
6205 tree
6206 remove_attribute (const char *attr_name, tree list)
6207 {
6208 tree *p;
6209 size_t attr_len = strlen (attr_name);
6210
6211 gcc_checking_assert (attr_name[0] != '_');
6212
6213 for (p = &list; *p; )
6214 {
6215 tree l = *p;
6216 /* TODO: If we were storing attributes in normalized form, here
6217 we could use a simple strcmp(). */
6218 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6219 *p = TREE_CHAIN (l);
6220 else
6221 p = &TREE_CHAIN (l);
6222 }
6223
6224 return list;
6225 }
6226
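/* Illustrative sketch, not part of the original file: remove_attribute
   returns the possibly shortened list, so the result must be stored
   back.  The helper name and the choice of attribute are hypothetical.  */

static void
strip_deprecated_attr_sketch (tree decl)
{
  DECL_ATTRIBUTES (decl)
    = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
}
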
6227 /* Return an attribute list that is the union of A1 and A2. */
6228
6229 tree
6230 merge_attributes (tree a1, tree a2)
6231 {
6232 tree attributes;
6233
6234 /* Either one unset? Take the set one. */
6235
6236 if ((attributes = a1) == 0)
6237 attributes = a2;
6238
6239 /* One that completely contains the other? Take it. */
6240
6241 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6242 {
6243 if (attribute_list_contained (a2, a1))
6244 attributes = a2;
6245 else
6246 {
6247 /* Pick the longest list, and hang on the other list. */
6248
6249 if (list_length (a1) < list_length (a2))
6250 attributes = a2, a2 = a1;
6251
6252 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6253 {
6254 tree a;
6255 for (a = lookup_ident_attribute (get_attribute_name (a2),
6256 attributes);
6257 a != NULL_TREE && !attribute_value_equal (a, a2);
6258 a = lookup_ident_attribute (get_attribute_name (a2),
6259 TREE_CHAIN (a)))
6260 ;
6261 if (a == NULL_TREE)
6262 {
6263 a1 = copy_node (a2);
6264 TREE_CHAIN (a1) = attributes;
6265 attributes = a1;
6266 }
6267 }
6268 }
6269 }
6270 return attributes;
6271 }
6272
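/* Illustrative sketch, not part of the original file: attribute lists
   are TREE_LIST chains with the name in TREE_PURPOSE and the arguments
   in TREE_VALUE.  Merging the hypothetical lists below yields a list
   containing "used" and "cold", with the duplicate "used" appearing
   only once.  */

static tree
merge_attributes_sketch (void)
{
  tree a1 = tree_cons (get_identifier ("used"), NULL_TREE, NULL_TREE);
  tree a2 = tree_cons (get_identifier ("used"), NULL_TREE,
                       tree_cons (get_identifier ("cold"), NULL_TREE,
                                  NULL_TREE));
  return merge_attributes (a1, a2);
}
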
6273 /* Given types T1 and T2, merge their attributes and return
6274 the result. */
6275
6276 tree
6277 merge_type_attributes (tree t1, tree t2)
6278 {
6279 return merge_attributes (TYPE_ATTRIBUTES (t1),
6280 TYPE_ATTRIBUTES (t2));
6281 }
6282
6283 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6284 the result. */
6285
6286 tree
6287 merge_decl_attributes (tree olddecl, tree newdecl)
6288 {
6289 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6290 DECL_ATTRIBUTES (newdecl));
6291 }
6292
6293 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6294
6295 /* Specialization of merge_decl_attributes for various Windows targets.
6296
6297 This handles the following situation:
6298
6299 __declspec (dllimport) int foo;
6300 int foo;
6301
6302 The second instance of `foo' nullifies the dllimport. */
6303
6304 tree
6305 merge_dllimport_decl_attributes (tree old, tree new_tree)
6306 {
6307 tree a;
6308 int delete_dllimport_p = 1;
6309
6310 /* What we need to do here is remove from `old' dllimport if it doesn't
6311 appear in `new'. dllimport behaves like extern: if a declaration is
6312 marked dllimport and a definition appears later, then the object
6313 is not dllimport'd. We also remove a `new' dllimport if the old list
6314 contains dllexport: dllexport always overrides dllimport, regardless
6315 of the order of declaration. */
6316 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6317 delete_dllimport_p = 0;
6318 else if (DECL_DLLIMPORT_P (new_tree)
6319 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6320 {
6321 DECL_DLLIMPORT_P (new_tree) = 0;
6322 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6323 "dllimport ignored", new_tree);
6324 }
6325 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6326 {
6327 /* Warn about overriding a symbol that has already been used, e.g.:
6328 extern int __attribute__ ((dllimport)) foo;
6329 int* bar () {return &foo;}
6330 int foo;
6331 */
6332 if (TREE_USED (old))
6333 {
6334 warning (0, "%q+D redeclared without dllimport attribute "
6335 "after being referenced with dll linkage", new_tree);
6336 /* If we have used a variable's address with dllimport linkage,
6337 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6338 decl may already have had TREE_CONSTANT computed.
6339 We still remove the attribute so that assembler code refers
6340 to '&foo' rather than '_imp__foo'. */
6341 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6342 DECL_DLLIMPORT_P (new_tree) = 1;
6343 }
6344
6345 /* Let an inline definition silently override the external reference,
6346 but otherwise warn about attribute inconsistency. */
6347 else if (TREE_CODE (new_tree) == VAR_DECL
6348 || !DECL_DECLARED_INLINE_P (new_tree))
6349 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6350 "previous dllimport ignored", new_tree);
6351 }
6352 else
6353 delete_dllimport_p = 0;
6354
6355 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6356
6357 if (delete_dllimport_p)
6358 a = remove_attribute ("dllimport", a);
6359
6360 return a;
6361 }
6362
6363 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6364 struct attribute_spec.handler. */
6365
6366 tree
6367 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6368 bool *no_add_attrs)
6369 {
6370 tree node = *pnode;
6371 bool is_dllimport;
6372
6373 /* These attributes may apply to structure and union types being created,
6374 but otherwise should pass to the declaration involved. */
6375 if (!DECL_P (node))
6376 {
6377 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6378 | (int) ATTR_FLAG_ARRAY_NEXT))
6379 {
6380 *no_add_attrs = true;
6381 return tree_cons (name, args, NULL_TREE);
6382 }
6383 if (TREE_CODE (node) == RECORD_TYPE
6384 || TREE_CODE (node) == UNION_TYPE)
6385 {
6386 node = TYPE_NAME (node);
6387 if (!node)
6388 return NULL_TREE;
6389 }
6390 else
6391 {
6392 warning (OPT_Wattributes, "%qE attribute ignored",
6393 name);
6394 *no_add_attrs = true;
6395 return NULL_TREE;
6396 }
6397 }
6398
6399 if (TREE_CODE (node) != FUNCTION_DECL
6400 && TREE_CODE (node) != VAR_DECL
6401 && TREE_CODE (node) != TYPE_DECL)
6402 {
6403 *no_add_attrs = true;
6404 warning (OPT_Wattributes, "%qE attribute ignored",
6405 name);
6406 return NULL_TREE;
6407 }
6408
6409 if (TREE_CODE (node) == TYPE_DECL
6410 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6411 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6412 {
6413 *no_add_attrs = true;
6414 warning (OPT_Wattributes, "%qE attribute ignored",
6415 name);
6416 return NULL_TREE;
6417 }
6418
6419 is_dllimport = is_attribute_p ("dllimport", name);
6420
6421 /* Report error on dllimport ambiguities seen now before they cause
6422 any damage. */
6423 if (is_dllimport)
6424 {
6425 /* Honor any target-specific overrides. */
6426 if (!targetm.valid_dllimport_attribute_p (node))
6427 *no_add_attrs = true;
6428
6429 else if (TREE_CODE (node) == FUNCTION_DECL
6430 && DECL_DECLARED_INLINE_P (node))
6431 {
6432 warning (OPT_Wattributes, "inline function %q+D declared as "
6433 " dllimport: attribute ignored", node);
6434 *no_add_attrs = true;
6435 }
6436 /* Like MS, treat definition of dllimported variables and
6437 non-inlined functions on declaration as syntax errors. */
6438 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6439 {
6440 error ("function %q+D definition is marked dllimport", node);
6441 *no_add_attrs = true;
6442 }
6443
6444 else if (TREE_CODE (node) == VAR_DECL)
6445 {
6446 if (DECL_INITIAL (node))
6447 {
6448 error ("variable %q+D definition is marked dllimport",
6449 node);
6450 *no_add_attrs = true;
6451 }
6452
6453 /* `extern' needn't be specified with dllimport.
6454 Specify `extern' now and hope for the best. Sigh. */
6455 DECL_EXTERNAL (node) = 1;
6456 /* Also, implicitly give dllimport'd variables declared within
6457 a function global scope, unless declared static. */
6458 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6459 TREE_PUBLIC (node) = 1;
6460 }
6461
6462 if (*no_add_attrs == false)
6463 DECL_DLLIMPORT_P (node) = 1;
6464 }
6465 else if (TREE_CODE (node) == FUNCTION_DECL
6466 && DECL_DECLARED_INLINE_P (node)
6467 && flag_keep_inline_dllexport)
6468 /* An exported function, even if inline, must be emitted. */
6469 DECL_EXTERNAL (node) = 0;
6470
6471 /* Report error if symbol is not accessible at global scope. */
6472 if (!TREE_PUBLIC (node)
6473 && (TREE_CODE (node) == VAR_DECL
6474 || TREE_CODE (node) == FUNCTION_DECL))
6475 {
6476 error ("external linkage required for symbol %q+D because of "
6477 "%qE attribute", node, name);
6478 *no_add_attrs = true;
6479 }
6480
6481 /* A dllexport'd entity must have default visibility so that other
6482 program units (shared libraries or the main executable) can see
6483 it. A dllimport'd entity must have default visibility so that
6484 the linker knows that undefined references within this program
6485 unit can be resolved by the dynamic linker. */
6486 if (!*no_add_attrs)
6487 {
6488 if (DECL_VISIBILITY_SPECIFIED (node)
6489 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6490 error ("%qE implies default visibility, but %qD has already "
6491 "been declared with a different visibility",
6492 name, node);
6493 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6494 DECL_VISIBILITY_SPECIFIED (node) = 1;
6495 }
6496
6497 return NULL_TREE;
6498 }
6499
6500 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6501 \f
6502 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6503 of the various TYPE_QUAL values. */
6504
6505 static void
6506 set_type_quals (tree type, int type_quals)
6507 {
6508 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6509 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6510 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6511 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6512 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6513 }
6514
6515 /* Returns true iff unqualified CAND and BASE are equivalent. */
6516
6517 bool
6518 check_base_type (const_tree cand, const_tree base)
6519 {
6520 return (TYPE_NAME (cand) == TYPE_NAME (base)
6521 /* Apparently this is needed for Objective-C. */
6522 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6523 /* Check alignment. */
6524 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6525 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6526 TYPE_ATTRIBUTES (base)));
6527 }
6528
6529 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6530
6531 bool
6532 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6533 {
6534 return (TYPE_QUALS (cand) == type_quals
6535 && check_base_type (cand, base));
6536 }
6537
6538 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6539
6540 static bool
6541 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6542 {
6543 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6544 && TYPE_NAME (cand) == TYPE_NAME (base)
6545 /* Apparently this is needed for Objective-C. */
6546 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6547 /* Check alignment. */
6548 && TYPE_ALIGN (cand) == align
6549 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6550 TYPE_ATTRIBUTES (base)));
6551 }
6552
6553 /* This function checks to see if TYPE matches the size of one of the built-in
6554 atomic types, and returns that core atomic type. */
6555
6556 static tree
6557 find_atomic_core_type (tree type)
6558 {
6559 tree base_atomic_type;
6560
6561 /* Only handle complete types. */
6562 if (TYPE_SIZE (type) == NULL_TREE)
6563 return NULL_TREE;
6564
6565 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6566 switch (type_size)
6567 {
6568 case 8:
6569 base_atomic_type = atomicQI_type_node;
6570 break;
6571
6572 case 16:
6573 base_atomic_type = atomicHI_type_node;
6574 break;
6575
6576 case 32:
6577 base_atomic_type = atomicSI_type_node;
6578 break;
6579
6580 case 64:
6581 base_atomic_type = atomicDI_type_node;
6582 break;
6583
6584 case 128:
6585 base_atomic_type = atomicTI_type_node;
6586 break;
6587
6588 default:
6589 base_atomic_type = NULL_TREE;
6590 }
6591
6592 return base_atomic_type;
6593 }
6594
6595 /* Return a version of the TYPE, qualified as indicated by the
6596 TYPE_QUALS, if one exists. If no qualified version exists yet,
6597 return NULL_TREE. */
6598
6599 tree
6600 get_qualified_type (tree type, int type_quals)
6601 {
6602 tree t;
6603
6604 if (TYPE_QUALS (type) == type_quals)
6605 return type;
6606
6607 /* Search the chain of variants to see if there is already one there just
6608 like the one we need to have. If so, use that existing one. We must
6609 preserve the TYPE_NAME, since there is code that depends on this. */
6610 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6611 if (check_qualified_type (t, type, type_quals))
6612 return t;
6613
6614 return NULL_TREE;
6615 }
6616
6617 /* Like get_qualified_type, but creates the type if it does not
6618 exist. This function never returns NULL_TREE. */
6619
6620 tree
6621 build_qualified_type (tree type, int type_quals)
6622 {
6623 tree t;
6624
6625 /* See if we already have the appropriate qualified variant. */
6626 t = get_qualified_type (type, type_quals);
6627
6628 /* If not, build it. */
6629 if (!t)
6630 {
6631 t = build_variant_type_copy (type);
6632 set_type_quals (t, type_quals);
6633
6634 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6635 {
6636 /* See if this object can map to a basic atomic type. */
6637 tree atomic_type = find_atomic_core_type (type);
6638 if (atomic_type)
6639 {
6640 /* Ensure the alignment of this type is compatible with
6641 the required alignment of the atomic type. */
6642 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6643 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6644 }
6645 }
6646
6647 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6648 /* Propagate structural equality. */
6649 SET_TYPE_STRUCTURAL_EQUALITY (t);
6650 else if (TYPE_CANONICAL (type) != type)
6651 /* Build the underlying canonical type, since it is different
6652 from TYPE. */
6653 {
6654 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6655 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6656 }
6657 else
6658 /* T is its own canonical type. */
6659 TYPE_CANONICAL (t) = t;
6660
6661 }
6662
6663 return t;
6664 }
6665
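/* Illustrative sketch, not part of the original file: requesting a
   "const volatile int" variant.  Qualifier bits are combined with
   bitwise or, and an existing variant on the chain is reused rather
   than rebuilt.  */

static tree
const_volatile_int_sketch (void)
{
  return build_qualified_type (integer_type_node,
                               TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
}
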
6666 /* Create a variant of TYPE with alignment ALIGN. */
6667
6668 tree
6669 build_aligned_type (tree type, unsigned int align)
6670 {
6671 tree t;
6672
6673 if (TYPE_PACKED (type)
6674 || TYPE_ALIGN (type) == align)
6675 return type;
6676
6677 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6678 if (check_aligned_type (t, type, align))
6679 return t;
6680
6681 t = build_variant_type_copy (type);
6682 SET_TYPE_ALIGN (t, align);
6683
6684 return t;
6685 }
6686
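/* Illustrative sketch, not part of the original file: TYPE_ALIGN is
   measured in bits, so a 16-byte aligned variant of char is requested
   with an alignment argument of 128.  */

static tree
char_aligned_16_sketch (void)
{
  return build_aligned_type (char_type_node, 128);
}
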
6687 /* Create a new distinct copy of TYPE. The new type is made its own
6688 MAIN_VARIANT. If TYPE requires structural equality checks, the
6689 resulting type requires structural equality checks; otherwise, its
6690 TYPE_CANONICAL points to itself. */
6691
6692 tree
6693 build_distinct_type_copy (tree type)
6694 {
6695 tree t = copy_node (type);
6696
6697 TYPE_POINTER_TO (t) = 0;
6698 TYPE_REFERENCE_TO (t) = 0;
6699
6700 /* Set the canonical type either to a new equivalence class, or
6701 propagate the need for structural equality checks. */
6702 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6703 SET_TYPE_STRUCTURAL_EQUALITY (t);
6704 else
6705 TYPE_CANONICAL (t) = t;
6706
6707 /* Make it its own variant. */
6708 TYPE_MAIN_VARIANT (t) = t;
6709 TYPE_NEXT_VARIANT (t) = 0;
6710
6711 /* We do not record methods in type copies nor variants,
6712 so we do not need to keep them up to date when a new method
6713 is inserted. */
6714 if (RECORD_OR_UNION_TYPE_P (t))
6715 TYPE_METHODS (t) = NULL_TREE;
6716
6717 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6718 whose TREE_TYPE is not t. This can also happen in the Ada
6719 frontend when using subtypes. */
6720
6721 return t;
6722 }
6723
6724 /* Create a new variant of TYPE, equivalent but distinct. This is so
6725 the caller can modify it. TYPE_CANONICAL for the return type will
6726 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6727 are considered equal by the language itself (or that both types
6728 require structural equality checks). */
6729
6730 tree
6731 build_variant_type_copy (tree type)
6732 {
6733 tree t, m = TYPE_MAIN_VARIANT (type);
6734
6735 t = build_distinct_type_copy (type);
6736
6737 /* Since we're building a variant, assume that it is a non-semantic
6738 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6739 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6740 /* Type variants have no alias set defined. */
6741 TYPE_ALIAS_SET (t) = -1;
6742
6743 /* Add the new type to the chain of variants of TYPE. */
6744 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6745 TYPE_NEXT_VARIANT (m) = t;
6746 TYPE_MAIN_VARIANT (t) = m;
6747
6748 return t;
6749 }
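
/* Illustrative sketch, not part of the original file: the difference
   between the two copy routines.  A distinct copy starts its own main
   variant (a new equivalence class), while a variant copy stays on
   TYPE's variant chain and shares its TYPE_CANONICAL.  The helper name
   is hypothetical.  */

static void
type_copy_kinds_sketch (tree type)
{
  tree distinct = build_distinct_type_copy (type);
  tree variant = build_variant_type_copy (type);

  gcc_checking_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
  gcc_checking_assert (TYPE_MAIN_VARIANT (variant)
                       == TYPE_MAIN_VARIANT (type));
}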
6750 \f
6751 /* Return true if the FROM trees in both tree maps are equal. */
6752
6753 int
6754 tree_map_base_eq (const void *va, const void *vb)
6755 {
6756 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6757 *const b = (const struct tree_map_base *) vb;
6758 return (a->from == b->from);
6759 }
6760
6761 /* Hash the FROM tree in a tree_map_base. */
6762
6763 unsigned int
6764 tree_map_base_hash (const void *item)
6765 {
6766 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6767 }
6768
6769 /* Return true if this tree map structure is marked for garbage collection
6770 purposes. We simply return true if the from tree is marked, so that this
6771 structure goes away when the from tree goes away. */
6772
6773 int
6774 tree_map_base_marked_p (const void *p)
6775 {
6776 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6777 }
6778
6779 /* Hash a from tree in a tree_map. */
6780
6781 unsigned int
6782 tree_map_hash (const void *item)
6783 {
6784 return (((const struct tree_map *) item)->hash);
6785 }
6786
6787 /* Hash a from tree in a tree_decl_map. */
6788
6789 unsigned int
6790 tree_decl_map_hash (const void *item)
6791 {
6792 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6793 }
6794
6795 /* Return the initialization priority for DECL. */
6796
6797 priority_type
6798 decl_init_priority_lookup (tree decl)
6799 {
6800 symtab_node *snode = symtab_node::get (decl);
6801
6802 if (!snode)
6803 return DEFAULT_INIT_PRIORITY;
6804 return
6805 snode->get_init_priority ();
6806 }
6807
6808 /* Return the finalization priority for DECL. */
6809
6810 priority_type
6811 decl_fini_priority_lookup (tree decl)
6812 {
6813 cgraph_node *node = cgraph_node::get (decl);
6814
6815 if (!node)
6816 return DEFAULT_INIT_PRIORITY;
6817 return
6818 node->get_fini_priority ();
6819 }
6820
6821 /* Set the initialization priority for DECL to PRIORITY. */
6822
6823 void
6824 decl_init_priority_insert (tree decl, priority_type priority)
6825 {
6826 struct symtab_node *snode;
6827
6828 if (priority == DEFAULT_INIT_PRIORITY)
6829 {
6830 snode = symtab_node::get (decl);
6831 if (!snode)
6832 return;
6833 }
6834 else if (TREE_CODE (decl) == VAR_DECL)
6835 snode = varpool_node::get_create (decl);
6836 else
6837 snode = cgraph_node::get_create (decl);
6838 snode->set_init_priority (priority);
6839 }
6840
6841 /* Set the finalization priority for DECL to PRIORITY. */
6842
6843 void
6844 decl_fini_priority_insert (tree decl, priority_type priority)
6845 {
6846 struct cgraph_node *node;
6847
6848 if (priority == DEFAULT_INIT_PRIORITY)
6849 {
6850 node = cgraph_node::get (decl);
6851 if (!node)
6852 return;
6853 }
6854 else
6855 node = cgraph_node::get_create (decl);
6856 node->set_fini_priority (priority);
6857 }
6858
6859 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6860
6861 static void
6862 print_debug_expr_statistics (void)
6863 {
6864 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6865 (long) debug_expr_for_decl->size (),
6866 (long) debug_expr_for_decl->elements (),
6867 debug_expr_for_decl->collisions ());
6868 }
6869
6870 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6871
6872 static void
6873 print_value_expr_statistics (void)
6874 {
6875 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6876 (long) value_expr_for_decl->size (),
6877 (long) value_expr_for_decl->elements (),
6878 value_expr_for_decl->collisions ());
6879 }
6880
6881 /* Lookup a debug expression for FROM, and return it if we find one. */
6882
6883 tree
6884 decl_debug_expr_lookup (tree from)
6885 {
6886 struct tree_decl_map *h, in;
6887 in.base.from = from;
6888
6889 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6890 if (h)
6891 return h->to;
6892 return NULL_TREE;
6893 }
6894
6895 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6896
6897 void
6898 decl_debug_expr_insert (tree from, tree to)
6899 {
6900 struct tree_decl_map *h;
6901
6902 h = ggc_alloc<tree_decl_map> ();
6903 h->base.from = from;
6904 h->to = to;
6905 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6906 }
6907
6908 /* Lookup a value expression for FROM, and return it if we find one. */
6909
6910 tree
6911 decl_value_expr_lookup (tree from)
6912 {
6913 struct tree_decl_map *h, in;
6914 in.base.from = from;
6915
6916 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6917 if (h)
6918 return h->to;
6919 return NULL_TREE;
6920 }
6921
6922 /* Insert a mapping FROM->TO in the value expression hashtable. */
6923
6924 void
6925 decl_value_expr_insert (tree from, tree to)
6926 {
6927 struct tree_decl_map *h;
6928
6929 h = ggc_alloc<tree_decl_map> ();
6930 h->base.from = from;
6931 h->to = to;
6932 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6933 }
6934
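/* Illustrative sketch, not part of the original file: attaching a
   DECL_VALUE_EXPR mapping and reading it back through the accessors
   above.  In real use the DECL_HAS_VALUE_EXPR_P flag (defined in
   tree.h) must also be set on FROM so that the mapping is consulted.  */

static void
value_expr_sketch (tree from, tree to)
{
  decl_value_expr_insert (from, to);
  gcc_checking_assert (decl_value_expr_lookup (from) == to);
}
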
6935 /* Lookup a vector of debug arguments for FROM, and return it if we
6936 find one. */
6937
6938 vec<tree, va_gc> **
6939 decl_debug_args_lookup (tree from)
6940 {
6941 struct tree_vec_map *h, in;
6942
6943 if (!DECL_HAS_DEBUG_ARGS_P (from))
6944 return NULL;
6945 gcc_checking_assert (debug_args_for_decl != NULL);
6946 in.base.from = from;
6947 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6948 if (h)
6949 return &h->to;
6950 return NULL;
6951 }
6952
6953 /* Insert a mapping FROM->empty vector of debug arguments in the
6954 debug arguments hashtable. */
6955
6956 vec<tree, va_gc> **
6957 decl_debug_args_insert (tree from)
6958 {
6959 struct tree_vec_map *h;
6960 tree_vec_map **loc;
6961
6962 if (DECL_HAS_DEBUG_ARGS_P (from))
6963 return decl_debug_args_lookup (from);
6964 if (debug_args_for_decl == NULL)
6965 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6966 h = ggc_alloc<tree_vec_map> ();
6967 h->base.from = from;
6968 h->to = NULL;
6969 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6970 *loc = h;
6971 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6972 return &h->to;
6973 }
6974
6975 /* Hashing of types so that we don't make duplicates.
6976 The entry point is `type_hash_canon'. */
6977
6978 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6979 with types in the TREE_VALUE slots), by adding the hash codes
6980 of the individual types. */
6981
6982 static void
6983 type_hash_list (const_tree list, inchash::hash &hstate)
6984 {
6985 const_tree tail;
6986
6987 for (tail = list; tail; tail = TREE_CHAIN (tail))
6988 if (TREE_VALUE (tail) != error_mark_node)
6989 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6990 }
6991
6992 /* These are the Hashtable callback functions. */
6993
6994 /* Returns true iff the types are equivalent. */
6995
6996 bool
6997 type_cache_hasher::equal (type_hash *a, type_hash *b)
6998 {
6999 /* First test the things that are the same for all types. */
7000 if (a->hash != b->hash
7001 || TREE_CODE (a->type) != TREE_CODE (b->type)
7002 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7003 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7004 TYPE_ATTRIBUTES (b->type))
7005 || (TREE_CODE (a->type) != COMPLEX_TYPE
7006 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7007 return 0;
7008
7009 /* Be careful about comparing arrays before and after the element type
7010 has been completed; don't compare TYPE_ALIGN unless both types are
7011 complete. */
7012 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7013 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7014 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7015 return 0;
7016
7017 switch (TREE_CODE (a->type))
7018 {
7019 case VOID_TYPE:
7020 case COMPLEX_TYPE:
7021 case POINTER_TYPE:
7022 case REFERENCE_TYPE:
7023 case NULLPTR_TYPE:
7024 return 1;
7025
7026 case VECTOR_TYPE:
7027 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7028
7029 case ENUMERAL_TYPE:
7030 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7031 && !(TYPE_VALUES (a->type)
7032 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7033 && TYPE_VALUES (b->type)
7034 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7035 && type_list_equal (TYPE_VALUES (a->type),
7036 TYPE_VALUES (b->type))))
7037 return 0;
7038
7039 /* ... fall through ... */
7040
7041 case INTEGER_TYPE:
7042 case REAL_TYPE:
7043 case BOOLEAN_TYPE:
7044 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7045 return false;
7046 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7047 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7048 TYPE_MAX_VALUE (b->type)))
7049 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7050 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7051 TYPE_MIN_VALUE (b->type))));
7052
7053 case FIXED_POINT_TYPE:
7054 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7055
7056 case OFFSET_TYPE:
7057 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7058
7059 case METHOD_TYPE:
7060 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7061 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7062 || (TYPE_ARG_TYPES (a->type)
7063 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7064 && TYPE_ARG_TYPES (b->type)
7065 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7066 && type_list_equal (TYPE_ARG_TYPES (a->type),
7067 TYPE_ARG_TYPES (b->type)))))
7068 break;
7069 return 0;
7070 case ARRAY_TYPE:
7071 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7072
7073 case RECORD_TYPE:
7074 case UNION_TYPE:
7075 case QUAL_UNION_TYPE:
7076 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7077 || (TYPE_FIELDS (a->type)
7078 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7079 && TYPE_FIELDS (b->type)
7080 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7081 && type_list_equal (TYPE_FIELDS (a->type),
7082 TYPE_FIELDS (b->type))));
7083
7084 case FUNCTION_TYPE:
7085 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7086 || (TYPE_ARG_TYPES (a->type)
7087 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7088 && TYPE_ARG_TYPES (b->type)
7089 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7090 && type_list_equal (TYPE_ARG_TYPES (a->type),
7091 TYPE_ARG_TYPES (b->type))))
7092 break;
7093 return 0;
7094
7095 default:
7096 return 0;
7097 }
7098
7099 if (lang_hooks.types.type_hash_eq != NULL)
7100 return lang_hooks.types.type_hash_eq (a->type, b->type);
7101
7102 return 1;
7103 }
7104
7105 /* Given TYPE, and HASHCODE its hash code, return the canonical
7106 object for an identical type if one already exists.
7107 Otherwise, return TYPE, and record it as the canonical object.
7108
7109 To use this function, first create a type of the sort you want.
7110 Then compute its hash code from the fields of the type that
7111 make it different from other similar types.
7112 Then call this function and use the value. */
7113
7114 tree
7115 type_hash_canon (unsigned int hashcode, tree type)
7116 {
7117 type_hash in;
7118 type_hash **loc;
7119
7120 /* The hash table only contains main variants, so ensure that's what we're
7121 being passed. */
7122 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7123
7124 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7125 must call that routine before comparing TYPE_ALIGNs. */
7126 layout_type (type);
7127
7128 in.hash = hashcode;
7129 in.type = type;
7130
7131 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7132 if (*loc)
7133 {
7134 tree t1 = ((type_hash *) *loc)->type;
7135 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7136 free_node (type);
7137 return t1;
7138 }
7139 else
7140 {
7141 struct type_hash *h;
7142
7143 h = ggc_alloc<type_hash> ();
7144 h->hash = hashcode;
7145 h->type = type;
7146 *loc = h;
7147
7148 return type;
7149 }
7150 }
7151
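/* Illustrative sketch, not part of the original file, of the calling
   convention documented above: build a candidate INTEGER_TYPE, hash the
   fields that distinguish it, and let type_hash_canon either return an
   existing identical node (freeing the candidate) or register the
   candidate as the canonical one.  This is similar in spirit to what
   build_nonstandard_integer_type does; the helper name is hypothetical,
   and every creator of such types must hash the same fields in the same
   way, or identical types end up in different buckets and are not
   shared.  */

static tree
canon_int_type_sketch (unsigned int precision, bool unsignedp)
{
  tree itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate, 0);
  inchash::add_expr (TYPE_MIN_VALUE (itype), hstate, 0);
  return type_hash_canon (hstate.end (), itype);
}
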
7152 static void
7153 print_type_hash_statistics (void)
7154 {
7155 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7156 (long) type_hash_table->size (),
7157 (long) type_hash_table->elements (),
7158 type_hash_table->collisions ());
7159 }
7160
7161 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7162 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7163 by adding the hash codes of the individual attributes. */
7164
7165 static void
7166 attribute_hash_list (const_tree list, inchash::hash &hstate)
7167 {
7168 const_tree tail;
7169
7170 for (tail = list; tail; tail = TREE_CHAIN (tail))
7171 /* ??? Do we want to add in TREE_VALUE too? */
7172 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7173 }
7174
7175 /* Given two lists of attributes, return true if list L2 is
7176 equivalent to L1. */
7177
7178 int
7179 attribute_list_equal (const_tree l1, const_tree l2)
7180 {
7181 if (l1 == l2)
7182 return 1;
7183
7184 return attribute_list_contained (l1, l2)
7185 && attribute_list_contained (l2, l1);
7186 }
7187
7188 /* Given two lists of attributes, return true if list L2 is
7189 completely contained within L1. */
7190 /* ??? This would be faster if attribute names were stored in a canonicalized
7191 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7192 must be used to show these elements are equivalent (which they are). */
7193 /* ??? It's not clear that attributes with arguments will always be handled
7194 correctly. */
7195
7196 int
7197 attribute_list_contained (const_tree l1, const_tree l2)
7198 {
7199 const_tree t1, t2;
7200
7201 /* First check the obvious, maybe the lists are identical. */
7202 if (l1 == l2)
7203 return 1;
7204
7205 /* Maybe the lists are similar. */
7206 for (t1 = l1, t2 = l2;
7207 t1 != 0 && t2 != 0
7208 && get_attribute_name (t1) == get_attribute_name (t2)
7209 && TREE_VALUE (t1) == TREE_VALUE (t2);
7210 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7211 ;
7212
7213 /* Maybe the lists are equal. */
7214 if (t1 == 0 && t2 == 0)
7215 return 1;
7216
7217 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7218 {
7219 const_tree attr;
7220 /* This CONST_CAST is okay because lookup_attribute does not
7221 modify its argument and the return value is assigned to a
7222 const_tree. */
7223 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7224 CONST_CAST_TREE (l1));
7225 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7226 attr = lookup_ident_attribute (get_attribute_name (t2),
7227 TREE_CHAIN (attr)))
7228 ;
7229
7230 if (attr == NULL_TREE)
7231 return 0;
7232 }
7233
7234 return 1;
7235 }
7236
7237 /* Given two lists of types
7238 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7239 return 1 if the lists contain the same types in the same order.
7240 Also, the TREE_PURPOSEs must match. */
7241
7242 int
7243 type_list_equal (const_tree l1, const_tree l2)
7244 {
7245 const_tree t1, t2;
7246
7247 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7248 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7249 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7250 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7251 && (TREE_TYPE (TREE_PURPOSE (t1))
7252 == TREE_TYPE (TREE_PURPOSE (t2))))))
7253 return 0;
7254
7255 return t1 == t2;
7256 }
7257
7258 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7259 given by TYPE. If the argument list accepts variable arguments,
7260 then this function counts only the ordinary arguments. */
7261
7262 int
7263 type_num_arguments (const_tree type)
7264 {
7265 int i = 0;
7266 tree t;
7267
7268 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7269 /* If the function does not take a variable number of arguments,
7270 the last element in the list will have type `void'. */
7271 if (VOID_TYPE_P (TREE_VALUE (t)))
7272 break;
7273 else
7274 ++i;
7275
7276 return i;
7277 }
7278
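/* Illustrative sketch, not part of the original file: counting the
   fixed arguments of a prototype built with build_function_type_list.
   For int f (int, double) the result is 2; for a variadic prototype
   only the arguments before the "..." are counted, per the loop above.  */

static int
count_fixed_args_sketch (void)
{
  tree fntype = build_function_type_list (integer_type_node,
                                          integer_type_node,
                                          double_type_node,
                                          NULL_TREE);
  return type_num_arguments (fntype);
}
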
7279 /* Nonzero if integer constants T1 and T2
7280 represent the same constant value. */
7281
7282 int
7283 tree_int_cst_equal (const_tree t1, const_tree t2)
7284 {
7285 if (t1 == t2)
7286 return 1;
7287
7288 if (t1 == 0 || t2 == 0)
7289 return 0;
7290
7291 if (TREE_CODE (t1) == INTEGER_CST
7292 && TREE_CODE (t2) == INTEGER_CST
7293 && wi::to_widest (t1) == wi::to_widest (t2))
7294 return 1;
7295
7296 return 0;
7297 }
7298
7299 /* Return true if T is an INTEGER_CST whose numerical value (extended
7300 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7301
7302 bool
7303 tree_fits_shwi_p (const_tree t)
7304 {
7305 return (t != NULL_TREE
7306 && TREE_CODE (t) == INTEGER_CST
7307 && wi::fits_shwi_p (wi::to_widest (t)));
7308 }
7309
7310 /* Return true if T is an INTEGER_CST whose numerical value (extended
7311 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7312
7313 bool
7314 tree_fits_uhwi_p (const_tree t)
7315 {
7316 return (t != NULL_TREE
7317 && TREE_CODE (t) == INTEGER_CST
7318 && wi::fits_uhwi_p (wi::to_widest (t)));
7319 }
7320
7321 /* T is an INTEGER_CST whose numerical value (extended according to
7322 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7323 HOST_WIDE_INT. */
7324
7325 HOST_WIDE_INT
7326 tree_to_shwi (const_tree t)
7327 {
7328 gcc_assert (tree_fits_shwi_p (t));
7329 return TREE_INT_CST_LOW (t);
7330 }
7331
7332 /* T is an INTEGER_CST whose numerical value (extended according to
7333 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7334 HOST_WIDE_INT. */
7335
7336 unsigned HOST_WIDE_INT
7337 tree_to_uhwi (const_tree t)
7338 {
7339 gcc_assert (tree_fits_uhwi_p (t));
7340 return TREE_INT_CST_LOW (t);
7341 }
7342
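/* Illustrative sketch, not part of the original file: the usual guarded
   pattern for extracting a host integer from a tree constant, calling
   the predicate before the accessor so the assert above cannot fire.
   The helper name is hypothetical.  */

static bool
extract_uhwi_sketch (tree t, unsigned HOST_WIDE_INT *out)
{
  if (t && tree_fits_uhwi_p (t))
    {
      *out = tree_to_uhwi (t);
      return true;
    }
  return false;
}
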
7343 /* Return the most significant (sign) bit of T. */
7344
7345 int
7346 tree_int_cst_sign_bit (const_tree t)
7347 {
7348 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7349
7350 return wi::extract_uhwi (t, bitno, 1);
7351 }
7352
7353 /* Return an indication of the sign of the integer constant T.
7354 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7355 Note that -1 will never be returned if T's type is unsigned. */
7356
7357 int
7358 tree_int_cst_sgn (const_tree t)
7359 {
7360 if (wi::eq_p (t, 0))
7361 return 0;
7362 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7363 return 1;
7364 else if (wi::neg_p (t))
7365 return -1;
7366 else
7367 return 1;
7368 }
7369
7370 /* Return the minimum number of bits needed to represent VALUE in a
7371 signed or unsigned type; SGN says which. */
7372
7373 unsigned int
7374 tree_int_cst_min_precision (tree value, signop sgn)
7375 {
7376 /* If the value is negative, compute its negative minus 1. The latter
7377 adjustment is because the absolute value of the largest negative value
7378 is one larger than the largest positive value. This is equivalent to
7379 a bit-wise negation, so use that operation instead. */
7380
7381 if (tree_int_cst_sgn (value) < 0)
7382 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7383
7384 /* Return the number of bits needed, taking into account the fact
7385 that we need one more bit for a signed than unsigned type.
7386 If VALUE is 0 or -1, the minimum precision is 1 no matter
7387 whether SGN is SIGNED or UNSIGNED. */
7388
7389 if (integer_zerop (value))
7390 return 1;
7391 else
7392 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7393 }
7394
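/* Illustrative sketch, not part of the original file: concrete values
   for the rule above.  For the constant 5 (binary 101) tree_floor_log2
   is 2, so the minimum precision is 3 bits unsigned and 4 bits signed;
   for 0 or -1 the answer is 1 either way.  */

static void
min_precision_sketch (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  gcc_checking_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_checking_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
}
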
7395 /* Return truthvalue of whether T1 is the same tree structure as T2.
7396 Return 1 if they are the same.
7397 Return 0 if they are understandably different.
7398 Return -1 if either contains tree structure not understood by
7399 this function. */
7400
7401 int
7402 simple_cst_equal (const_tree t1, const_tree t2)
7403 {
7404 enum tree_code code1, code2;
7405 int cmp;
7406 int i;
7407
7408 if (t1 == t2)
7409 return 1;
7410 if (t1 == 0 || t2 == 0)
7411 return 0;
7412
7413 code1 = TREE_CODE (t1);
7414 code2 = TREE_CODE (t2);
7415
7416 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7417 {
7418 if (CONVERT_EXPR_CODE_P (code2)
7419 || code2 == NON_LVALUE_EXPR)
7420 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7421 else
7422 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7423 }
7424
7425 else if (CONVERT_EXPR_CODE_P (code2)
7426 || code2 == NON_LVALUE_EXPR)
7427 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7428
7429 if (code1 != code2)
7430 return 0;
7431
7432 switch (code1)
7433 {
7434 case INTEGER_CST:
7435 return wi::to_widest (t1) == wi::to_widest (t2);
7436
7437 case REAL_CST:
7438 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7439
7440 case FIXED_CST:
7441 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7442
7443 case STRING_CST:
7444 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7445 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7446 TREE_STRING_LENGTH (t1)));
7447
7448 case CONSTRUCTOR:
7449 {
7450 unsigned HOST_WIDE_INT idx;
7451 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7452 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7453
7454 if (vec_safe_length (v1) != vec_safe_length (v2))
7455 return false;
7456
7457 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7458 /* ??? Should we handle also fields here? */
7459 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7460 return false;
7461 return true;
7462 }
7463
7464 case SAVE_EXPR:
7465 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7466
7467 case CALL_EXPR:
7468 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7469 if (cmp <= 0)
7470 return cmp;
7471 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7472 return 0;
7473 {
7474 const_tree arg1, arg2;
7475 const_call_expr_arg_iterator iter1, iter2;
7476 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7477 arg2 = first_const_call_expr_arg (t2, &iter2);
7478 arg1 && arg2;
7479 arg1 = next_const_call_expr_arg (&iter1),
7480 arg2 = next_const_call_expr_arg (&iter2))
7481 {
7482 cmp = simple_cst_equal (arg1, arg2);
7483 if (cmp <= 0)
7484 return cmp;
7485 }
7486 return arg1 == arg2;
7487 }
7488
7489 case TARGET_EXPR:
7490 /* Special case: if either target is an unallocated VAR_DECL,
7491 it means that it's going to be unified with whatever the
7492 TARGET_EXPR is really supposed to initialize, so treat it
7493 as being equivalent to anything. */
7494 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7495 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7496 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7497 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7498 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7499 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7500 cmp = 1;
7501 else
7502 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7503
7504 if (cmp <= 0)
7505 return cmp;
7506
7507 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7508
7509 case WITH_CLEANUP_EXPR:
7510 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7511 if (cmp <= 0)
7512 return cmp;
7513
7514 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7515
7516 case COMPONENT_REF:
7517 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7518 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7519
7520 return 0;
7521
7522 case VAR_DECL:
7523 case PARM_DECL:
7524 case CONST_DECL:
7525 case FUNCTION_DECL:
7526 return 0;
7527
7528 default:
7529 break;
7530 }
7531
7532 /* This general rule works for most tree codes. All exceptions should be
7533 handled above. If this is a language-specific tree code, we can't
7534 trust what might be in the operand, so say we don't know
7535 the situation. */
7536 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7537 return -1;
7538
7539 switch (TREE_CODE_CLASS (code1))
7540 {
7541 case tcc_unary:
7542 case tcc_binary:
7543 case tcc_comparison:
7544 case tcc_expression:
7545 case tcc_reference:
7546 case tcc_statement:
7547 cmp = 1;
7548 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7549 {
7550 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7551 if (cmp <= 0)
7552 return cmp;
7553 }
7554
7555 return cmp;
7556
7557 default:
7558 return -1;
7559 }
7560 }
7561
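/* Illustrative sketch, not part of the original file: honoring the
   three-way return convention above.  A caller that only cares about
   "definitely equal" must compare against 1, since -1 means the answer
   is unknown.  The helper name is hypothetical.  */

static bool
known_equal_sketch (const_tree t1, const_tree t2)
{
  return simple_cst_equal (t1, t2) == 1;
}
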
7562 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7563 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7564 than U, respectively. */
7565
7566 int
7567 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7568 {
7569 if (tree_int_cst_sgn (t) < 0)
7570 return -1;
7571 else if (!tree_fits_uhwi_p (t))
7572 return 1;
7573 else if (TREE_INT_CST_LOW (t) == u)
7574 return 0;
7575 else if (TREE_INT_CST_LOW (t) < u)
7576 return -1;
7577 else
7578 return 1;
7579 }
7580
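/* Illustrative sketch, not part of the original file: compare_tree_int
   is convenient for bounds checks against plain host integers without
   converting the tree constant first.  The limit is arbitrary and the
   helper name is hypothetical.  */

static bool
within_limit_sketch (const_tree len)
{
  return compare_tree_int (len, 4096) <= 0;
}
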
7581 /* Return true if SIZE represents a constant size that is in bounds of
7582 what the middle-end and the backend accepts (covering not more than
7583 half of the address-space). */
7584
7585 bool
7586 valid_constant_size_p (const_tree size)
7587 {
7588 if (! tree_fits_uhwi_p (size)
7589 || TREE_OVERFLOW (size)
7590 || tree_int_cst_sign_bit (size) != 0)
7591 return false;
7592 return true;
7593 }
7594
7595 /* Return the precision of the type, or for a complex or vector type the
7596 precision of the type of its elements. */
7597
7598 unsigned int
7599 element_precision (const_tree type)
7600 {
7601 if (!TYPE_P (type))
7602 type = TREE_TYPE (type);
7603 enum tree_code code = TREE_CODE (type);
7604 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7605 type = TREE_TYPE (type);
7606
7607 return TYPE_PRECISION (type);
7608 }
7609
7610 /* Return true if CODE represents an associative tree code. Otherwise
7611 return false. */
7612 bool
7613 associative_tree_code (enum tree_code code)
7614 {
7615 switch (code)
7616 {
7617 case BIT_IOR_EXPR:
7618 case BIT_AND_EXPR:
7619 case BIT_XOR_EXPR:
7620 case PLUS_EXPR:
7621 case MULT_EXPR:
7622 case MIN_EXPR:
7623 case MAX_EXPR:
7624 return true;
7625
7626 default:
7627 break;
7628 }
7629 return false;
7630 }
7631
7632 /* Return true if CODE represents a commutative tree code. Otherwise
7633 return false. */
7634 bool
7635 commutative_tree_code (enum tree_code code)
7636 {
7637 switch (code)
7638 {
7639 case PLUS_EXPR:
7640 case MULT_EXPR:
7641 case MULT_HIGHPART_EXPR:
7642 case MIN_EXPR:
7643 case MAX_EXPR:
7644 case BIT_IOR_EXPR:
7645 case BIT_XOR_EXPR:
7646 case BIT_AND_EXPR:
7647 case NE_EXPR:
7648 case EQ_EXPR:
7649 case UNORDERED_EXPR:
7650 case ORDERED_EXPR:
7651 case UNEQ_EXPR:
7652 case LTGT_EXPR:
7653 case TRUTH_AND_EXPR:
7654 case TRUTH_XOR_EXPR:
7655 case TRUTH_OR_EXPR:
7656 case WIDEN_MULT_EXPR:
7657 case VEC_WIDEN_MULT_HI_EXPR:
7658 case VEC_WIDEN_MULT_LO_EXPR:
7659 case VEC_WIDEN_MULT_EVEN_EXPR:
7660 case VEC_WIDEN_MULT_ODD_EXPR:
7661 return true;
7662
7663 default:
7664 break;
7665 }
7666 return false;
7667 }
7668
7669 /* Return true if CODE represents a ternary tree code for which the
7670 first two operands are commutative. Otherwise return false. */
7671 bool
7672 commutative_ternary_tree_code (enum tree_code code)
7673 {
7674 switch (code)
7675 {
7676 case WIDEN_MULT_PLUS_EXPR:
7677 case WIDEN_MULT_MINUS_EXPR:
7678 case DOT_PROD_EXPR:
7679 case FMA_EXPR:
7680 return true;
7681
7682 default:
7683 break;
7684 }
7685 return false;
7686 }
7687
7688 /* Returns true if CODE can overflow. */
7689
7690 bool
7691 operation_can_overflow (enum tree_code code)
7692 {
7693 switch (code)
7694 {
7695 case PLUS_EXPR:
7696 case MINUS_EXPR:
7697 case MULT_EXPR:
7698 case LSHIFT_EXPR:
7699 /* Can overflow in various ways. */
7700 return true;
7701 case TRUNC_DIV_EXPR:
7702 case EXACT_DIV_EXPR:
7703 case FLOOR_DIV_EXPR:
7704 case CEIL_DIV_EXPR:
7705 /* For INT_MIN / -1. */
7706 return true;
7707 case NEGATE_EXPR:
7708 case ABS_EXPR:
7709 /* For -INT_MIN. */
7710 return true;
7711 default:
7712 /* These operators cannot overflow. */
7713 return false;
7714 }
7715 }
7716
7717 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7718 -ftrapv doesn't generate trapping insns for CODE. */
7719
7720 bool
7721 operation_no_trapping_overflow (tree type, enum tree_code code)
7722 {
7723 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7724
7725 /* We don't generate instructions that trap on overflow for complex or vector
7726 types. */
7727 if (!INTEGRAL_TYPE_P (type))
7728 return true;
7729
7730 if (!TYPE_OVERFLOW_TRAPS (type))
7731 return true;
7732
7733 switch (code)
7734 {
7735 case PLUS_EXPR:
7736 case MINUS_EXPR:
7737 case MULT_EXPR:
7738 case NEGATE_EXPR:
7739 case ABS_EXPR:
7740 /* These operators can overflow, and -ftrapv generates trapping code for
7741 these. */
7742 return false;
7743 case TRUNC_DIV_EXPR:
7744 case EXACT_DIV_EXPR:
7745 case FLOOR_DIV_EXPR:
7746 case CEIL_DIV_EXPR:
7747 case LSHIFT_EXPR:
7748 /* These operators can overflow, but -ftrapv does not generate trapping
7749 code for these. */
7750 return true;
7751 default:
7752 /* These operators cannot overflow. */
7753 return true;
7754 }
7755 }
7756
7757 namespace inchash
7758 {
7759
7760 /* Generate a hash value for an expression. This can be used iteratively
7761 by passing a previous result as the HSTATE argument.
7762
7763 This function is intended to produce the same hash for expressions which
7764 would compare equal using operand_equal_p. */
7765 void
7766 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7767 {
7768 int i;
7769 enum tree_code code;
7770 enum tree_code_class tclass;
7771
7772 if (t == NULL_TREE)
7773 {
7774 hstate.merge_hash (0);
7775 return;
7776 }
7777
7778 if (!(flags & OEP_ADDRESS_OF))
7779 STRIP_NOPS (t);
7780
7781 code = TREE_CODE (t);
7782
7783 switch (code)
7784 {
7785 /* Alas, constants aren't shared, so we can't rely on pointer
7786 identity. */
7787 case VOID_CST:
7788 hstate.merge_hash (0);
7789 return;
7790 case INTEGER_CST:
7791 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7792 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7793 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7794 return;
7795 case REAL_CST:
7796 {
7797 unsigned int val2;
7798 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7799 val2 = rvc_zero;
7800 else
7801 val2 = real_hash (TREE_REAL_CST_PTR (t));
7802 hstate.merge_hash (val2);
7803 return;
7804 }
7805 case FIXED_CST:
7806 {
7807 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7808 hstate.merge_hash (val2);
7809 return;
7810 }
7811 case STRING_CST:
7812 hstate.add ((const void *) TREE_STRING_POINTER (t),
7813 TREE_STRING_LENGTH (t));
7814 return;
7815 case COMPLEX_CST:
7816 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7817 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7818 return;
7819 case VECTOR_CST:
7820 {
7821 unsigned i;
7822 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7823 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
7824 return;
7825 }
7826 case SSA_NAME:
7827 /* We can just compare by pointer. */
7828 hstate.add_wide_int (SSA_NAME_VERSION (t));
7829 return;
7830 case PLACEHOLDER_EXPR:
7831 /* The node itself doesn't matter. */
7832 return;
7833 case BLOCK:
7834 case OMP_CLAUSE:
7835 /* Ignore. */
7836 return;
7837 case TREE_LIST:
7838 /* A list of expressions, for a CALL_EXPR or as the elements of a
7839 VECTOR_CST. */
7840 for (; t; t = TREE_CHAIN (t))
7841 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7842 return;
7843 case CONSTRUCTOR:
7844 {
7845 unsigned HOST_WIDE_INT idx;
7846 tree field, value;
7847 flags &= ~OEP_ADDRESS_OF;
7848 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7849 {
7850 inchash::add_expr (field, hstate, flags);
7851 inchash::add_expr (value, hstate, flags);
7852 }
7853 return;
7854 }
7855 case STATEMENT_LIST:
7856 {
7857 tree_stmt_iterator i;
7858 for (i = tsi_start (CONST_CAST_TREE (t));
7859 !tsi_end_p (i); tsi_next (&i))
7860 inchash::add_expr (tsi_stmt (i), hstate, flags);
7861 return;
7862 }
7863 case FUNCTION_DECL:
7864 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7865 Otherwise nodes that compare equal according to operand_equal_p might
7866 get different hash codes. However, don't do this for machine specific
7867 or front end builtins, since the function code is overloaded in those
7868 cases. */
7869 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7870 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7871 {
7872 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7873 code = TREE_CODE (t);
7874 }
7875 /* FALL THROUGH */
7876 default:
7877 tclass = TREE_CODE_CLASS (code);
7878
7879 if (tclass == tcc_declaration)
7880 {
7881 /* DECLs have a unique ID. */
7882 hstate.add_wide_int (DECL_UID (t));
7883 }
7884 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7885 {
7886 /* For comparisons that can be swapped, use the lower
7887 tree code. */
7888 enum tree_code ccode = swap_tree_comparison (code);
7889 if (code < ccode)
7890 ccode = code;
7891 hstate.add_object (ccode);
7892 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7893 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7894 }
7895 else if (CONVERT_EXPR_CODE_P (code))
7896 {
7897 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7898 operand_equal_p. */
7899 enum tree_code ccode = NOP_EXPR;
7900 hstate.add_object (ccode);
7901
7902 /* Don't hash the type, that can lead to having nodes which
7903 compare equal according to operand_equal_p, but which
7904 have different hash codes. Make sure to include signedness
7905 in the hash computation. */
7906 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7907 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7908 }
7909 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7910 else if (code == MEM_REF
7911 && (flags & OEP_ADDRESS_OF) != 0
7912 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7913 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7914 && integer_zerop (TREE_OPERAND (t, 1)))
7915 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7916 hstate, flags);
7917 /* Don't ICE on FE specific trees, or their arguments etc.
7918 during operand_equal_p hash verification. */
7919 else if (!IS_EXPR_CODE_CLASS (tclass))
7920 gcc_assert (flags & OEP_HASH_CHECK);
7921 else
7922 {
7923 unsigned int sflags = flags;
7924
7925 hstate.add_object (code);
7926
7927 switch (code)
7928 {
7929 case ADDR_EXPR:
7930 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7931 flags |= OEP_ADDRESS_OF;
7932 sflags = flags;
7933 break;
7934
7935 case INDIRECT_REF:
7936 case MEM_REF:
7937 case TARGET_MEM_REF:
7938 flags &= ~OEP_ADDRESS_OF;
7939 sflags = flags;
7940 break;
7941
7942 case ARRAY_REF:
7943 case ARRAY_RANGE_REF:
7944 case COMPONENT_REF:
7945 case BIT_FIELD_REF:
7946 sflags &= ~OEP_ADDRESS_OF;
7947 break;
7948
7949 case COND_EXPR:
7950 flags &= ~OEP_ADDRESS_OF;
7951 break;
7952
7953 case FMA_EXPR:
7954 case WIDEN_MULT_PLUS_EXPR:
7955 case WIDEN_MULT_MINUS_EXPR:
7956 {
7957 /* The multiplication operands are commutative. */
7958 inchash::hash one, two;
7959 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7960 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7961 hstate.add_commutative (one, two);
7962 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7963 return;
7964 }
7965
7966 case CALL_EXPR:
7967 if (CALL_EXPR_FN (t) == NULL_TREE)
7968 hstate.add_int (CALL_EXPR_IFN (t));
7969 break;
7970
7971 case TARGET_EXPR:
7972 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7973 Usually different TARGET_EXPRs should just use
7974 different temporaries in their slots. */
7975 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7976 return;
7977
7978 default:
7979 break;
7980 }
7981
7982 /* Don't hash the type, that can lead to having nodes which
7983 compare equal according to operand_equal_p, but which
7984 have different hash codes. */
7985 if (code == NON_LVALUE_EXPR)
7986 {
7987 /* Make sure to include signedness in the hash computation. */
7988 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7989 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7990 }
7991
7992 else if (commutative_tree_code (code))
7993 {
7994 /* It's a commutative expression. We want to hash it the same
7995 however it appears. We do this by first hashing both operands
7996 and then rehashing based on the order of their independent
7997 hashes. */
7998 inchash::hash one, two;
7999 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8000 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8001 hstate.add_commutative (one, two);
8002 }
8003 else
8004 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
8005 inchash::add_expr (TREE_OPERAND (t, i), hstate,
8006 i == 0 ? flags : sflags);
8007 }
8008 return;
8009 }
8010 }
8011
8012 }
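
/* Usage sketch for inchash::add_expr: to hash two GENERIC trees and
   compare the results (say, for a hash table keyed on operand_equal_p
   equality), one would do roughly

     inchash::hash h1, h2;
     inchash::add_expr (x, h1);
     inchash::add_expr (y, h2);
     bool maybe_equal = (h1.end () == h2.end ());

   where x and y stand for the trees being compared.  Because commutative
   codes are folded in with hstate.add_commutative, A + B and B + A hash
   to the same value, matching operand_equal_p's view of them.  */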
8013
8014 /* Constructors for pointer, array and function types.
8015 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8016 constructed by language-dependent code, not here.) */
8017
8018 /* Construct, lay out and return the type of pointers to TO_TYPE with
8019 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8020 reference all of memory. If such a type has already been
8021 constructed, reuse it. */
8022
8023 tree
8024 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8025 bool can_alias_all)
8026 {
8027 tree t;
8028 bool could_alias = can_alias_all;
8029
8030 if (to_type == error_mark_node)
8031 return error_mark_node;
8032
8033 /* If the pointed-to type has the may_alias attribute set, force
8034 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8035 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8036 can_alias_all = true;
8037
8038 /* In some cases, languages will have things that aren't a POINTER_TYPE
8039 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8040 In that case, return that type without regard to the rest of our
8041 operands.
8042
8043 ??? This is a kludge, but consistent with the way this function has
8044 always operated and there doesn't seem to be a good way to avoid this
8045 at the moment. */
8046 if (TYPE_POINTER_TO (to_type) != 0
8047 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8048 return TYPE_POINTER_TO (to_type);
8049
8050 /* First, if we already have a type for pointers to TO_TYPE and it's
8051 the proper mode, use it. */
8052 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8053 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8054 return t;
8055
8056 t = make_node (POINTER_TYPE);
8057
8058 TREE_TYPE (t) = to_type;
8059 SET_TYPE_MODE (t, mode);
8060 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8061 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8062 TYPE_POINTER_TO (to_type) = t;
8063
8064 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8065 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8066 SET_TYPE_STRUCTURAL_EQUALITY (t);
8067 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8068 TYPE_CANONICAL (t)
8069 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8070 mode, false);
8071
8072 /* Lay out the type. This function has many callers that are concerned
8073 with expression-construction, and this simplifies them all. */
8074 layout_type (t);
8075
8076 return t;
8077 }
8078
8079 /* By default build pointers in ptr_mode. */
8080
8081 tree
8082 build_pointer_type (tree to_type)
8083 {
8084 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8085 : TYPE_ADDR_SPACE (to_type);
8086 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8087 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8088 }
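
/* Usage sketch: the usual way to obtain "int *" in the middle end is
   simply

     tree int_ptr = build_pointer_type (integer_type_node);

   Repeated calls should return the same POINTER_TYPE node, because the
   TYPE_POINTER_TO chain of integer_type_node is searched before a new
   node is made.  */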
8089
8090 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8091
8092 tree
8093 build_reference_type_for_mode (tree to_type, machine_mode mode,
8094 bool can_alias_all)
8095 {
8096 tree t;
8097 bool could_alias = can_alias_all;
8098
8099 if (to_type == error_mark_node)
8100 return error_mark_node;
8101
8102 /* If the pointed-to type has the may_alias attribute set, force
8103 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8104 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8105 can_alias_all = true;
8106
8107 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8108 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8109 In that case, return that type without regard to the rest of our
8110 operands.
8111
8112 ??? This is a kludge, but consistent with the way this function has
8113 always operated and there doesn't seem to be a good way to avoid this
8114 at the moment. */
8115 if (TYPE_REFERENCE_TO (to_type) != 0
8116 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8117 return TYPE_REFERENCE_TO (to_type);
8118
8119 /* First, if we already have a type for references to TO_TYPE and it's
8120 the proper mode, use it. */
8121 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8122 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8123 return t;
8124
8125 t = make_node (REFERENCE_TYPE);
8126
8127 TREE_TYPE (t) = to_type;
8128 SET_TYPE_MODE (t, mode);
8129 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8130 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8131 TYPE_REFERENCE_TO (to_type) = t;
8132
8133 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8134 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8135 SET_TYPE_STRUCTURAL_EQUALITY (t);
8136 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8137 TYPE_CANONICAL (t)
8138 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8139 mode, false);
8140
8141 layout_type (t);
8142
8143 return t;
8144 }
8145
8146
8147 /* Build the node for the type of references-to-TO_TYPE by default
8148 in ptr_mode. */
8149
8150 tree
8151 build_reference_type (tree to_type)
8152 {
8153 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8154 : TYPE_ADDR_SPACE (to_type);
8155 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8156 return build_reference_type_for_mode (to_type, pointer_mode, false);
8157 }
8158
8159 #define MAX_INT_CACHED_PREC \
8160 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8161 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8162
8163 /* Builds a signed or unsigned integer type of precision PRECISION.
8164 Used for C bitfields whose precision does not match that of
8165 built-in target types. */
8166 tree
8167 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8168 int unsignedp)
8169 {
8170 tree itype, ret;
8171
8172 if (unsignedp)
8173 unsignedp = MAX_INT_CACHED_PREC + 1;
8174
8175 if (precision <= MAX_INT_CACHED_PREC)
8176 {
8177 itype = nonstandard_integer_type_cache[precision + unsignedp];
8178 if (itype)
8179 return itype;
8180 }
8181
8182 itype = make_node (INTEGER_TYPE);
8183 TYPE_PRECISION (itype) = precision;
8184
8185 if (unsignedp)
8186 fixup_unsigned_type (itype);
8187 else
8188 fixup_signed_type (itype);
8189
8190 ret = itype;
8191 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8192 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8193 if (precision <= MAX_INT_CACHED_PREC)
8194 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8195
8196 return ret;
8197 }
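
/* Usage sketch: a front end lowering the bit-field "unsigned i : 24"
   would represent its type roughly as

     tree uint24 = build_nonstandard_integer_type (24, 1);

   The result is cached, so later requests for a 24-bit unsigned type
   should return the same node.  */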
8198
8199 #define MAX_BOOL_CACHED_PREC \
8200 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8201 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8202
8203 /* Builds a boolean type of precision PRECISION.
8204 Used for boolean vectors to choose proper vector element size. */
8205 tree
8206 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8207 {
8208 tree type;
8209
8210 if (precision <= MAX_BOOL_CACHED_PREC)
8211 {
8212 type = nonstandard_boolean_type_cache[precision];
8213 if (type)
8214 return type;
8215 }
8216
8217 type = make_node (BOOLEAN_TYPE);
8218 TYPE_PRECISION (type) = precision;
8219 fixup_signed_type (type);
8220
8221 if (precision <= MAX_BOOL_CACHED_PREC)
8222 nonstandard_boolean_type_cache[precision] = type;
8223
8224 return type;
8225 }
8226
8227 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8228 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8229 is true, reuse such a type that has already been constructed. */
8230
8231 static tree
8232 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8233 {
8234 tree itype = make_node (INTEGER_TYPE);
8235 inchash::hash hstate;
8236
8237 TREE_TYPE (itype) = type;
8238
8239 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8240 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8241
8242 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8243 SET_TYPE_MODE (itype, TYPE_MODE (type));
8244 TYPE_SIZE (itype) = TYPE_SIZE (type);
8245 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8246 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8247 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8248
8249 if (!shared)
8250 return itype;
8251
8252 if ((TYPE_MIN_VALUE (itype)
8253 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8254 || (TYPE_MAX_VALUE (itype)
8255 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8256 {
8257 /* Since we cannot reliably merge this type, we need to compare it using
8258 structural equality checks. */
8259 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8260 return itype;
8261 }
8262
8263 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8264 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8265 hstate.merge_hash (TYPE_HASH (type));
8266 itype = type_hash_canon (hstate.end (), itype);
8267
8268 return itype;
8269 }
8270
8271 /* Wrapper around build_range_type_1 with SHARED set to true. */
8272
8273 tree
8274 build_range_type (tree type, tree lowval, tree highval)
8275 {
8276 return build_range_type_1 (type, lowval, highval, true);
8277 }
8278
8279 /* Wrapper around build_range_type_1 with SHARED set to false. */
8280
8281 tree
8282 build_nonshared_range_type (tree type, tree lowval, tree highval)
8283 {
8284 return build_range_type_1 (type, lowval, highval, false);
8285 }
8286
8287 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8288 MAXVAL should be the maximum value in the domain
8289 (one less than the length of the array).
8290
8291 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8292 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8293 The limit exists because the result is a signed type and we don't handle
8294 sizes that use more than one HOST_WIDE_INT. */
8295
8296 tree
8297 build_index_type (tree maxval)
8298 {
8299 return build_range_type (sizetype, size_zero_node, maxval);
8300 }
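
/* Usage sketch: the TYPE_DOMAIN for a ten-element array is the sizetype
   range [0, 9], i.e.

     tree domain = build_index_type (size_int (9));

   which is shorthand for build_range_type (sizetype, size_zero_node,
   size_int (9)).  */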
8301
8302 /* Return true if the debug information for TYPE, a subtype, should be emitted
8303 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8304 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8305 debug info and doesn't reflect the source code. */
8306
8307 bool
8308 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8309 {
8310 tree base_type = TREE_TYPE (type), low, high;
8311
8312 /* Subrange types have a base type which is an integral type. */
8313 if (!INTEGRAL_TYPE_P (base_type))
8314 return false;
8315
8316 /* Get the real bounds of the subtype. */
8317 if (lang_hooks.types.get_subrange_bounds)
8318 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8319 else
8320 {
8321 low = TYPE_MIN_VALUE (type);
8322 high = TYPE_MAX_VALUE (type);
8323 }
8324
8325 /* If the type and its base type have the same representation and the same
8326 name, then the type is not a subrange but a copy of the base type. */
8327 if ((TREE_CODE (base_type) == INTEGER_TYPE
8328 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8329 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8330 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8331 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8332 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8333 return false;
8334
8335 if (lowval)
8336 *lowval = low;
8337 if (highval)
8338 *highval = high;
8339 return true;
8340 }
8341
8342 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8343 and number of elements specified by the range of values of INDEX_TYPE.
8344 If SHARED is true, reuse such a type that has already been constructed. */
8345
8346 static tree
8347 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8348 {
8349 tree t;
8350
8351 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8352 {
8353 error ("arrays of functions are not meaningful");
8354 elt_type = integer_type_node;
8355 }
8356
8357 t = make_node (ARRAY_TYPE);
8358 TREE_TYPE (t) = elt_type;
8359 TYPE_DOMAIN (t) = index_type;
8360 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8361 layout_type (t);
8362
8363 /* If the element type is incomplete at this point we get marked for
8364 structural equality. Do not record these types in the canonical
8365 type hashtable. */
8366 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8367 return t;
8368
8369 if (shared)
8370 {
8371 inchash::hash hstate;
8372 hstate.add_object (TYPE_HASH (elt_type));
8373 if (index_type)
8374 hstate.add_object (TYPE_HASH (index_type));
8375 t = type_hash_canon (hstate.end (), t);
8376 }
8377
8378 if (TYPE_CANONICAL (t) == t)
8379 {
8380 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8381 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8382 || in_lto_p)
8383 SET_TYPE_STRUCTURAL_EQUALITY (t);
8384 else if (TYPE_CANONICAL (elt_type) != elt_type
8385 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8386 TYPE_CANONICAL (t)
8387 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8388 index_type
8389 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8390 shared);
8391 }
8392
8393 return t;
8394 }
8395
8396 /* Wrapper around build_array_type_1 with SHARED set to true. */
8397
8398 tree
8399 build_array_type (tree elt_type, tree index_type)
8400 {
8401 return build_array_type_1 (elt_type, index_type, true);
8402 }
8403
8404 /* Wrapper around build_array_type_1 with SHARED set to false. */
8405
8406 tree
8407 build_nonshared_array_type (tree elt_type, tree index_type)
8408 {
8409 return build_array_type_1 (elt_type, index_type, false);
8410 }
8411
8412 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8413 sizetype. */
8414
8415 tree
8416 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8417 {
8418 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8419 }
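
/* Usage sketch: combining the constructors above, the type for "int [10]"
   can be built either explicitly,

     tree arr = build_array_type (integer_type_node,
                                  build_index_type (size_int (9)));

   or with the shorthand build_array_type_nelts (integer_type_node, 10);
   both forms should yield the same hashed ARRAY_TYPE node.  */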
8420
8421 /* Recursively strips the array types from TYPE until a non-array
8422 element type is found, and returns that element type. */
8423
8424 tree
8425 strip_array_types (tree type)
8426 {
8427 while (TREE_CODE (type) == ARRAY_TYPE)
8428 type = TREE_TYPE (type);
8429
8430 return type;
8431 }
8432
8433 /* Computes the canonical argument types from the argument type list
8434 ARGTYPES.
8435
8436 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8437 on entry to this function, or if any of the ARGTYPES are
8438 structural.
8439
8440 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8441 true on entry to this function, or if any of the ARGTYPES are
8442 non-canonical.
8443
8444 Returns a canonical argument list, which may be ARGTYPES when the
8445 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8446 true) or would not differ from ARGTYPES. */
8447
8448 static tree
8449 maybe_canonicalize_argtypes (tree argtypes,
8450 bool *any_structural_p,
8451 bool *any_noncanonical_p)
8452 {
8453 tree arg;
8454 bool any_noncanonical_argtypes_p = false;
8455
8456 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8457 {
8458 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8459 /* Fail gracefully by stating that the type is structural. */
8460 *any_structural_p = true;
8461 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8462 *any_structural_p = true;
8463 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8464 || TREE_PURPOSE (arg))
8465 /* If the argument has a default argument, we consider it
8466 non-canonical even though the type itself is canonical.
8467 That way, different variants of function and method types
8468 with default arguments will all point to the variant with
8469 no defaults as their canonical type. */
8470 any_noncanonical_argtypes_p = true;
8471 }
8472
8473 if (*any_structural_p)
8474 return argtypes;
8475
8476 if (any_noncanonical_argtypes_p)
8477 {
8478 /* Build the canonical list of argument types. */
8479 tree canon_argtypes = NULL_TREE;
8480 bool is_void = false;
8481
8482 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8483 {
8484 if (arg == void_list_node)
8485 is_void = true;
8486 else
8487 canon_argtypes = tree_cons (NULL_TREE,
8488 TYPE_CANONICAL (TREE_VALUE (arg)),
8489 canon_argtypes);
8490 }
8491
8492 canon_argtypes = nreverse (canon_argtypes);
8493 if (is_void)
8494 canon_argtypes = chainon (canon_argtypes, void_list_node);
8495
8496 /* There is a non-canonical type. */
8497 *any_noncanonical_p = true;
8498 return canon_argtypes;
8499 }
8500
8501 /* The canonical argument types are the same as ARGTYPES. */
8502 return argtypes;
8503 }
8504
8505 /* Construct, lay out and return
8506 the type of functions returning type VALUE_TYPE
8507 given arguments of types ARG_TYPES.
8508 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8509 are data type nodes for the arguments of the function.
8510 If such a type has already been constructed, reuse it. */
8511
8512 tree
8513 build_function_type (tree value_type, tree arg_types)
8514 {
8515 tree t;
8516 inchash::hash hstate;
8517 bool any_structural_p, any_noncanonical_p;
8518 tree canon_argtypes;
8519
8520 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8521 {
8522 error ("function return type cannot be function");
8523 value_type = integer_type_node;
8524 }
8525
8526 /* Make a node of the sort we want. */
8527 t = make_node (FUNCTION_TYPE);
8528 TREE_TYPE (t) = value_type;
8529 TYPE_ARG_TYPES (t) = arg_types;
8530
8531 /* If we already have such a type, use the old one. */
8532 hstate.add_object (TYPE_HASH (value_type));
8533 type_hash_list (arg_types, hstate);
8534 t = type_hash_canon (hstate.end (), t);
8535
8536 /* Set up the canonical type. */
8537 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8538 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8539 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8540 &any_structural_p,
8541 &any_noncanonical_p);
8542 if (any_structural_p)
8543 SET_TYPE_STRUCTURAL_EQUALITY (t);
8544 else if (any_noncanonical_p)
8545 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8546 canon_argtypes);
8547
8548 if (!COMPLETE_TYPE_P (t))
8549 layout_type (t);
8550 return t;
8551 }
8552
8553 /* Build a function type. The RETURN_TYPE is the type returned by the
8554 function. If VAARGS is set, no void_type_node is appended to the
8555 list. ARGP must always be terminated by a NULL_TREE. */
8556
8557 static tree
8558 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8559 {
8560 tree t, args, last;
8561
8562 t = va_arg (argp, tree);
8563 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8564 args = tree_cons (NULL_TREE, t, args);
8565
8566 if (vaargs)
8567 {
8568 last = args;
8569 if (args != NULL_TREE)
8570 args = nreverse (args);
8571 gcc_assert (last != void_list_node);
8572 }
8573 else if (args == NULL_TREE)
8574 args = void_list_node;
8575 else
8576 {
8577 last = args;
8578 args = nreverse (args);
8579 TREE_CHAIN (last) = void_list_node;
8580 }
8581 args = build_function_type (return_type, args);
8582
8583 return args;
8584 }
8585
8586 /* Build a function type. The RETURN_TYPE is the type returned by the
8587 function. If additional arguments are provided, they are
8588 additional argument types. The list of argument types must always
8589 be terminated by NULL_TREE. */
8590
8591 tree
8592 build_function_type_list (tree return_type, ...)
8593 {
8594 tree args;
8595 va_list p;
8596
8597 va_start (p, return_type);
8598 args = build_function_type_list_1 (false, return_type, p);
8599 va_end (p);
8600 return args;
8601 }
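
/* Usage sketch: the type of "double hypot (double, double)" is typically
   built with

     tree fntype = build_function_type_list (double_type_node,
                                             double_type_node,
                                             double_type_node,
                                             NULL_TREE);

   The trailing NULL_TREE terminates the argument list; since this is not
   the varargs variant, the list is implicitly ended with void_list_node.  */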
8602
8603 /* Build a variable argument function type. The RETURN_TYPE is the
8604 type returned by the function. If additional arguments are provided,
8605 they are additional argument types. The list of argument types must
8606 always be terminated by NULL_TREE. */
8607
8608 tree
8609 build_varargs_function_type_list (tree return_type, ...)
8610 {
8611 tree args;
8612 va_list p;
8613
8614 va_start (p, return_type);
8615 args = build_function_type_list_1 (true, return_type, p);
8616 va_end (p);
8617
8618 return args;
8619 }
8620
8621 /* Build a function type. RETURN_TYPE is the type returned by the
8622 function; VAARGS indicates whether the function takes varargs. The
8623 function takes N named arguments, the types of which are provided in
8624 ARG_TYPES. */
8625
8626 static tree
8627 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8628 tree *arg_types)
8629 {
8630 int i;
8631 tree t = vaargs ? NULL_TREE : void_list_node;
8632
8633 for (i = n - 1; i >= 0; i--)
8634 t = tree_cons (NULL_TREE, arg_types[i], t);
8635
8636 return build_function_type (return_type, t);
8637 }
8638
8639 /* Build a function type. RETURN_TYPE is the type returned by the
8640 function. The function takes N named arguments, the types of which
8641 are provided in ARG_TYPES. */
8642
8643 tree
8644 build_function_type_array (tree return_type, int n, tree *arg_types)
8645 {
8646 return build_function_type_array_1 (false, return_type, n, arg_types);
8647 }
8648
8649 /* Build a variable argument function type. RETURN_TYPE is the type
8650 returned by the function. The function takes N named arguments, the
8651 types of which are provided in ARG_TYPES. */
8652
8653 tree
8654 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8655 {
8656 return build_function_type_array_1 (true, return_type, n, arg_types);
8657 }
8658
8659 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8660 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8661 for the method. An implicit additional parameter (of type
8662 pointer-to-BASETYPE) is added to the ARGTYPES. */
8663
8664 tree
8665 build_method_type_directly (tree basetype,
8666 tree rettype,
8667 tree argtypes)
8668 {
8669 tree t;
8670 tree ptype;
8671 inchash::hash hstate;
8672 bool any_structural_p, any_noncanonical_p;
8673 tree canon_argtypes;
8674
8675 /* Make a node of the sort we want. */
8676 t = make_node (METHOD_TYPE);
8677
8678 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8679 TREE_TYPE (t) = rettype;
8680 ptype = build_pointer_type (basetype);
8681
8682 /* The actual arglist for this function includes a "hidden" argument
8683 which is "this". Put it into the list of argument types. */
8684 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8685 TYPE_ARG_TYPES (t) = argtypes;
8686
8687 /* If we already have such a type, use the old one. */
8688 hstate.add_object (TYPE_HASH (basetype));
8689 hstate.add_object (TYPE_HASH (rettype));
8690 type_hash_list (argtypes, hstate);
8691 t = type_hash_canon (hstate.end (), t);
8692
8693 /* Set up the canonical type. */
8694 any_structural_p
8695 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8696 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8697 any_noncanonical_p
8698 = (TYPE_CANONICAL (basetype) != basetype
8699 || TYPE_CANONICAL (rettype) != rettype);
8700 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8701 &any_structural_p,
8702 &any_noncanonical_p);
8703 if (any_structural_p)
8704 SET_TYPE_STRUCTURAL_EQUALITY (t);
8705 else if (any_noncanonical_p)
8706 TYPE_CANONICAL (t)
8707 = build_method_type_directly (TYPE_CANONICAL (basetype),
8708 TYPE_CANONICAL (rettype),
8709 canon_argtypes);
8710 if (!COMPLETE_TYPE_P (t))
8711 layout_type (t);
8712
8713 return t;
8714 }
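
/* Usage sketch: for a C++ member function "int C::f (double)", the front
   end would call something like

     tree mtype
       = build_method_type_directly (c_type, integer_type_node, argtypes);

   where c_type stands for the RECORD_TYPE of C and argtypes for a
   TREE_LIST of double_type_node terminated by void_list_node; the hidden
   "this" parameter of type "C *" is prepended here automatically.  */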
8715
8716 /* Construct, lay out and return the type of methods belonging to class
8717 BASETYPE and whose arguments and values are described by TYPE.
8718 If that type exists already, reuse it.
8719 TYPE must be a FUNCTION_TYPE node. */
8720
8721 tree
8722 build_method_type (tree basetype, tree type)
8723 {
8724 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8725
8726 return build_method_type_directly (basetype,
8727 TREE_TYPE (type),
8728 TYPE_ARG_TYPES (type));
8729 }
8730
8731 /* Construct, lay out and return the type of offsets to a value
8732 of type TYPE, within an object of type BASETYPE.
8733 If a suitable offset type exists already, reuse it. */
8734
8735 tree
8736 build_offset_type (tree basetype, tree type)
8737 {
8738 tree t;
8739 inchash::hash hstate;
8740
8741 /* Make a node of the sort we want. */
8742 t = make_node (OFFSET_TYPE);
8743
8744 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8745 TREE_TYPE (t) = type;
8746
8747 /* If we already have such a type, use the old one. */
8748 hstate.add_object (TYPE_HASH (basetype));
8749 hstate.add_object (TYPE_HASH (type));
8750 t = type_hash_canon (hstate.end (), t);
8751
8752 if (!COMPLETE_TYPE_P (t))
8753 layout_type (t);
8754
8755 if (TYPE_CANONICAL (t) == t)
8756 {
8757 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8758 || TYPE_STRUCTURAL_EQUALITY_P (type))
8759 SET_TYPE_STRUCTURAL_EQUALITY (t);
8760 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8761 || TYPE_CANONICAL (type) != type)
8762 TYPE_CANONICAL (t)
8763 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8764 TYPE_CANONICAL (type));
8765 }
8766
8767 return t;
8768 }
8769
8770 /* Create a complex type whose components are COMPONENT_TYPE. */
8771
8772 tree
8773 build_complex_type (tree component_type)
8774 {
8775 tree t;
8776 inchash::hash hstate;
8777
8778 gcc_assert (INTEGRAL_TYPE_P (component_type)
8779 || SCALAR_FLOAT_TYPE_P (component_type)
8780 || FIXED_POINT_TYPE_P (component_type));
8781
8782 /* Make a node of the sort we want. */
8783 t = make_node (COMPLEX_TYPE);
8784
8785 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8786 SET_TYPE_MODE (t, GET_MODE_COMPLEX_MODE (TYPE_MODE (component_type)));
8787
8788 /* If we already have such a type, use the old one. */
8789 hstate.add_object (TYPE_HASH (component_type));
8790 t = type_hash_canon (hstate.end (), t);
8791
8792 if (!COMPLETE_TYPE_P (t))
8793 layout_type (t);
8794
8795 if (TYPE_CANONICAL (t) == t)
8796 {
8797 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8798 SET_TYPE_STRUCTURAL_EQUALITY (t);
8799 else if (TYPE_CANONICAL (component_type) != component_type)
8800 TYPE_CANONICAL (t)
8801 = build_complex_type (TYPE_CANONICAL (component_type));
8802 }
8803
8804 /* We need to create a name, since complex is a fundamental type. */
8805 if (! TYPE_NAME (t))
8806 {
8807 const char *name;
8808 if (component_type == char_type_node)
8809 name = "complex char";
8810 else if (component_type == signed_char_type_node)
8811 name = "complex signed char";
8812 else if (component_type == unsigned_char_type_node)
8813 name = "complex unsigned char";
8814 else if (component_type == short_integer_type_node)
8815 name = "complex short int";
8816 else if (component_type == short_unsigned_type_node)
8817 name = "complex short unsigned int";
8818 else if (component_type == integer_type_node)
8819 name = "complex int";
8820 else if (component_type == unsigned_type_node)
8821 name = "complex unsigned int";
8822 else if (component_type == long_integer_type_node)
8823 name = "complex long int";
8824 else if (component_type == long_unsigned_type_node)
8825 name = "complex long unsigned int";
8826 else if (component_type == long_long_integer_type_node)
8827 name = "complex long long int";
8828 else if (component_type == long_long_unsigned_type_node)
8829 name = "complex long long unsigned int";
8830 else
8831 name = 0;
8832
8833 if (name != 0)
8834 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8835 get_identifier (name), t);
8836 }
8837
8838 return build_qualified_type (t, TYPE_QUALS (component_type));
8839 }
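
/* Usage sketch:

     tree ctype = build_complex_type (double_type_node);

   should hand back the shared node for "complex double" (normally the
   same node as complex_double_type_node), thanks to the type hash table
   consulted above.  */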
8840
8841 /* If TYPE is a real or complex floating-point type and the target
8842 does not directly support arithmetic on TYPE then return the wider
8843 type to be used for arithmetic on TYPE. Otherwise, return
8844 NULL_TREE. */
8845
8846 tree
8847 excess_precision_type (tree type)
8848 {
8849 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8850 {
8851 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8852 switch (TREE_CODE (type))
8853 {
8854 case REAL_TYPE:
8855 switch (flt_eval_method)
8856 {
8857 case 1:
8858 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8859 return double_type_node;
8860 break;
8861 case 2:
8862 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8863 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8864 return long_double_type_node;
8865 break;
8866 default:
8867 gcc_unreachable ();
8868 }
8869 break;
8870 case COMPLEX_TYPE:
8871 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8872 return NULL_TREE;
8873 switch (flt_eval_method)
8874 {
8875 case 1:
8876 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8877 return complex_double_type_node;
8878 break;
8879 case 2:
8880 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8881 || (TYPE_MODE (TREE_TYPE (type))
8882 == TYPE_MODE (double_type_node)))
8883 return complex_long_double_type_node;
8884 break;
8885 default:
8886 gcc_unreachable ();
8887 }
8888 break;
8889 default:
8890 break;
8891 }
8892 }
8893 return NULL_TREE;
8894 }
8895 \f
8896 /* Return OP, stripped of any conversions to wider types as much as is safe.
8897 Converting the value back to OP's type makes a value equivalent to OP.
8898
8899 If FOR_TYPE is nonzero, we return a value which, if converted to
8900 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8901
8902 OP must have integer, real or enumeral type. Pointers are not allowed!
8903
8904 There are some cases where the obvious value we could return
8905 would regenerate to OP if converted to OP's type,
8906 but would not extend like OP to wider types.
8907 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8908 For example, if OP is (unsigned short)(signed char)-1,
8909 we avoid returning (signed char)-1 if FOR_TYPE is int,
8910 even though extending that to an unsigned short would regenerate OP,
8911 since the result of extending (signed char)-1 to (int)
8912 is different from (int) OP. */
8913
8914 tree
8915 get_unwidened (tree op, tree for_type)
8916 {
8917 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8918 tree type = TREE_TYPE (op);
8919 unsigned final_prec
8920 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8921 int uns
8922 = (for_type != 0 && for_type != type
8923 && final_prec > TYPE_PRECISION (type)
8924 && TYPE_UNSIGNED (type));
8925 tree win = op;
8926
8927 while (CONVERT_EXPR_P (op))
8928 {
8929 int bitschange;
8930
8931 /* TYPE_PRECISION on vector types has different meaning
8932 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8933 so avoid them here. */
8934 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8935 break;
8936
8937 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8938 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8939
8940 /* Truncations are many-one so cannot be removed,
8941 unless we are later going to truncate down even further. */
8942 if (bitschange < 0
8943 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8944 break;
8945
8946 /* See what's inside this conversion. If we decide to strip it,
8947 we will set WIN. */
8948 op = TREE_OPERAND (op, 0);
8949
8950 /* If we have not stripped any zero-extensions (uns is 0),
8951 we can strip any kind of extension.
8952 If we have previously stripped a zero-extension,
8953 only zero-extensions can safely be stripped.
8954 Any extension can be stripped if the bits it would produce
8955 are all going to be discarded later by truncating to FOR_TYPE. */
8956
8957 if (bitschange > 0)
8958 {
8959 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8960 win = op;
8961 /* TYPE_UNSIGNED says whether this is a zero-extension.
8962 Let's avoid computing it if it does not affect WIN
8963 and if UNS will not be needed again. */
8964 if ((uns
8965 || CONVERT_EXPR_P (op))
8966 && TYPE_UNSIGNED (TREE_TYPE (op)))
8967 {
8968 uns = 1;
8969 win = op;
8970 }
8971 }
8972 }
8973
8974 /* If we finally reach a constant see if it fits in for_type and
8975 in that case convert it. */
8976 if (for_type
8977 && TREE_CODE (win) == INTEGER_CST
8978 && TREE_TYPE (win) != for_type
8979 && int_fits_type_p (win, for_type))
8980 win = fold_convert (for_type, win);
8981
8982 return win;
8983 }
8984 \f
8985 /* Return OP or a simpler expression for a narrower value
8986 which can be sign-extended or zero-extended to give back OP.
8987 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8988 or 0 if the value should be sign-extended. */
8989
8990 tree
8991 get_narrower (tree op, int *unsignedp_ptr)
8992 {
8993 int uns = 0;
8994 int first = 1;
8995 tree win = op;
8996 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8997
8998 while (TREE_CODE (op) == NOP_EXPR)
8999 {
9000 int bitschange
9001 = (TYPE_PRECISION (TREE_TYPE (op))
9002 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9003
9004 /* Truncations are many-one so cannot be removed. */
9005 if (bitschange < 0)
9006 break;
9007
9008 /* See what's inside this conversion. If we decide to strip it,
9009 we will set WIN. */
9010
9011 if (bitschange > 0)
9012 {
9013 op = TREE_OPERAND (op, 0);
9014 /* An extension: the outermost one can be stripped,
9015 but remember whether it is zero or sign extension. */
9016 if (first)
9017 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9018 /* Otherwise, if a sign extension has been stripped,
9019 only sign extensions can now be stripped;
9020 if a zero extension has been stripped, only zero-extensions. */
9021 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9022 break;
9023 first = 0;
9024 }
9025 else /* bitschange == 0 */
9026 {
9027 /* A change in nominal type can always be stripped, but we must
9028 preserve the unsignedness. */
9029 if (first)
9030 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9031 first = 0;
9032 op = TREE_OPERAND (op, 0);
9033 /* Keep trying to narrow, but don't assign op to win if it
9034 would turn an integral type into something else. */
9035 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9036 continue;
9037 }
9038
9039 win = op;
9040 }
9041
9042 if (TREE_CODE (op) == COMPONENT_REF
9043 /* Since type_for_size always gives an integer type. */
9044 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9045 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9046 /* Ensure field is laid out already. */
9047 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9048 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9049 {
9050 unsigned HOST_WIDE_INT innerprec
9051 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9052 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9053 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9054 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9055
9056 /* We can get this structure field in a narrower type that fits it,
9057 but the resulting extension to its nominal type (a fullword type)
9058 must satisfy the same conditions as for other extensions.
9059
9060 Do this only for fields that are aligned (not bit-fields),
9061 because when bit-field insns will be used there is no
9062 advantage in doing this. */
9063
9064 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9065 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9066 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9067 && type != 0)
9068 {
9069 if (first)
9070 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9071 win = fold_convert (type, op);
9072 }
9073 }
9074
9075 *unsignedp_ptr = uns;
9076 return win;
9077 }
9078 \f
9079 /* Returns true if integer constant C has a value that is permissible
9080 for type TYPE (an INTEGER_TYPE). */
9081
9082 bool
9083 int_fits_type_p (const_tree c, const_tree type)
9084 {
9085 tree type_low_bound, type_high_bound;
9086 bool ok_for_low_bound, ok_for_high_bound;
9087 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9088
9089 retry:
9090 type_low_bound = TYPE_MIN_VALUE (type);
9091 type_high_bound = TYPE_MAX_VALUE (type);
9092
9093 /* If at least one bound of the type is a constant integer, we can check
9094 ourselves and maybe make a decision. If no such decision is possible, but
9095 this type is a subtype, try checking against that. Otherwise, use
9096 fits_to_tree_p, which checks against the precision.
9097
9098 Compute the status for each possibly constant bound, and return if we see
9099 one does not match. Use ok_for_xxx_bound to record whether the corresponding
9100 bound is a constant integer that C is known to satisfy; if C is known not to
9101 satisfy a constant bound, return false immediately. */
9102
9103 /* Check if c >= type_low_bound. */
9104 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9105 {
9106 if (tree_int_cst_lt (c, type_low_bound))
9107 return false;
9108 ok_for_low_bound = true;
9109 }
9110 else
9111 ok_for_low_bound = false;
9112
9113 /* Check if c <= type_high_bound. */
9114 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9115 {
9116 if (tree_int_cst_lt (type_high_bound, c))
9117 return false;
9118 ok_for_high_bound = true;
9119 }
9120 else
9121 ok_for_high_bound = false;
9122
9123 /* If the constant fits both bounds, the result is known. */
9124 if (ok_for_low_bound && ok_for_high_bound)
9125 return true;
9126
9127 /* Perform some generic filtering which may allow making a decision
9128 even if the bounds are not constant. First, negative integers
9129 never fit in unsigned types. */
9130 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9131 return false;
9132
9133 /* Second, narrower types always fit in wider ones. */
9134 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9135 return true;
9136
9137 /* Third, unsigned integers with top bit set never fit signed types. */
9138 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9139 {
9140 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9141 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9142 {
9143 /* When a tree_cst is converted to a wide-int, the precision
9144 is taken from the type. However, if the precision of the
9145 mode underneath the type is smaller than that, it is
9146 possible that the value will not fit. The test below
9147 fails if any bit is set between the sign bit of the
9148 underlying mode and the top bit of the type. */
9149 if (wi::ne_p (wi::zext (c, prec - 1), c))
9150 return false;
9151 }
9152 else if (wi::neg_p (c))
9153 return false;
9154 }
9155
9156 /* If we haven't been able to decide at this point, there is nothing more we
9157 can check ourselves here. Look at the base type if we have one and it
9158 has the same precision. */
9159 if (TREE_CODE (type) == INTEGER_TYPE
9160 && TREE_TYPE (type) != 0
9161 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9162 {
9163 type = TREE_TYPE (type);
9164 goto retry;
9165 }
9166
9167 /* Or to fits_to_tree_p, if nothing else. */
9168 return wi::fits_to_tree_p (c, type);
9169 }
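
/* Usage sketch: deciding whether the constant 300 can be narrowed to
   "signed char" without changing its value:

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, signed_char_type_node);

   yields false on typical targets (300 exceeds the [-128, 127] bounds),
   whereas the same constant does fit short_integer_type_node.  */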
9170
9171 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9172 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9173 represented (assuming two's-complement arithmetic) within the bit
9174 precision of the type are returned instead. */
9175
9176 void
9177 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9178 {
9179 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9180 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9181 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9182 else
9183 {
9184 if (TYPE_UNSIGNED (type))
9185 mpz_set_ui (min, 0);
9186 else
9187 {
9188 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9189 wi::to_mpz (mn, min, SIGNED);
9190 }
9191 }
9192
9193 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9194 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9195 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9196 else
9197 {
9198 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9199 wi::to_mpz (mn, max, TYPE_SIGN (type));
9200 }
9201 }
9202
9203 /* Return true if VAR is an automatic variable defined in function FN. */
9204
9205 bool
9206 auto_var_in_fn_p (const_tree var, const_tree fn)
9207 {
9208 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9209 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9210 || TREE_CODE (var) == PARM_DECL)
9211 && ! TREE_STATIC (var))
9212 || TREE_CODE (var) == LABEL_DECL
9213 || TREE_CODE (var) == RESULT_DECL));
9214 }
9215
9216 /* Subprogram of the following function. Called by walk_tree.
9217
9218 Return *TP if it is an automatic variable or parameter of the
9219 function passed in as DATA. */
9220
9221 static tree
9222 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9223 {
9224 tree fn = (tree) data;
9225
9226 if (TYPE_P (*tp))
9227 *walk_subtrees = 0;
9228
9229 else if (DECL_P (*tp)
9230 && auto_var_in_fn_p (*tp, fn))
9231 return *tp;
9232
9233 return NULL_TREE;
9234 }
9235
9236 /* Returns true if T is, contains, or refers to a type with variable
9237 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9238 arguments, but not the return type. If FN is nonzero, only return
9239 true if a modifier of the type or position of FN is a variable or
9240 parameter inside FN.
9241
9242 This concept is more general than that of C99 'variably modified types':
9243 in C99, a struct type is never variably modified because a VLA may not
9244 appear as a structure member. However, in GNU C, code like:
9245
9246 struct S { int i[f()]; };
9247
9248 is valid, and other languages may define similar constructs. */
9249
9250 bool
9251 variably_modified_type_p (tree type, tree fn)
9252 {
9253 tree t;
9254
9255 /* Test if T is either variable (if FN is zero) or an expression containing
9256 a variable in FN. If TYPE isn't gimplified, return true also if
9257 gimplify_one_sizepos would gimplify the expression into a local
9258 variable. */
9259 #define RETURN_TRUE_IF_VAR(T) \
9260 do { tree _t = (T); \
9261 if (_t != NULL_TREE \
9262 && _t != error_mark_node \
9263 && TREE_CODE (_t) != INTEGER_CST \
9264 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9265 && (!fn \
9266 || (!TYPE_SIZES_GIMPLIFIED (type) \
9267 && !is_gimple_sizepos (_t)) \
9268 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9269 return true; } while (0)
9270
9271 if (type == error_mark_node)
9272 return false;
9273
9274 /* If TYPE itself has variable size, it is variably modified. */
9275 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9276 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9277
9278 switch (TREE_CODE (type))
9279 {
9280 case POINTER_TYPE:
9281 case REFERENCE_TYPE:
9282 case VECTOR_TYPE:
9283 if (variably_modified_type_p (TREE_TYPE (type), fn))
9284 return true;
9285 break;
9286
9287 case FUNCTION_TYPE:
9288 case METHOD_TYPE:
9289 /* If TYPE is a function type, it is variably modified if the
9290 return type is variably modified. */
9291 if (variably_modified_type_p (TREE_TYPE (type), fn))
9292 return true;
9293 break;
9294
9295 case INTEGER_TYPE:
9296 case REAL_TYPE:
9297 case FIXED_POINT_TYPE:
9298 case ENUMERAL_TYPE:
9299 case BOOLEAN_TYPE:
9300 /* Scalar types are variably modified if their end points
9301 aren't constant. */
9302 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9303 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9304 break;
9305
9306 case RECORD_TYPE:
9307 case UNION_TYPE:
9308 case QUAL_UNION_TYPE:
9309 /* We can't see if any of the fields are variably-modified by the
9310 definition we normally use, since that would produce infinite
9311 recursion via pointers. */
9312 /* This is variably modified if some field's type is. */
9313 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9314 if (TREE_CODE (t) == FIELD_DECL)
9315 {
9316 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9317 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9318 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9319
9320 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9321 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9322 }
9323 break;
9324
9325 case ARRAY_TYPE:
9326 /* Do not call ourselves to avoid infinite recursion. This is
9327 variably modified if the element type is. */
9328 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9329 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9330 break;
9331
9332 default:
9333 break;
9334 }
9335
9336 /* The current language may have other cases to check, but in general,
9337 all other types are not variably modified. */
9338 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9339
9340 #undef RETURN_TRUE_IF_VAR
9341 }
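
/* Usage sketch: given the C function

     void f (int n) { int a[n]; ... }

   the ARRAY_TYPE of "a" has a non-constant TYPE_SIZE depending on the
   PARM_DECL "n", so variably_modified_type_p (TREE_TYPE (a_decl), f_decl)
   returns true, while it returns false for a plain "int [10]".  Here
   a_decl and f_decl stand for the corresponding VAR_DECL and
   FUNCTION_DECL.  */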
9342
9343 /* Given a DECL or TYPE, return the scope in which it was declared, or
9344 NULL_TREE if there is no containing scope. */
9345
9346 tree
9347 get_containing_scope (const_tree t)
9348 {
9349 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9350 }
9351
9352 /* Return the innermost context enclosing DECL that is
9353 a FUNCTION_DECL, or zero if none. */
9354
9355 tree
9356 decl_function_context (const_tree decl)
9357 {
9358 tree context;
9359
9360 if (TREE_CODE (decl) == ERROR_MARK)
9361 return 0;
9362
9363 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9364 where we look up the function at runtime. Such functions always take
9365 a first argument of type 'pointer to real context'.
9366
9367 C++ should really be fixed to use DECL_CONTEXT for the real context,
9368 and use something else for the "virtual context". */
9369 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9370 context
9371 = TYPE_MAIN_VARIANT
9372 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9373 else
9374 context = DECL_CONTEXT (decl);
9375
9376 while (context && TREE_CODE (context) != FUNCTION_DECL)
9377 {
9378 if (TREE_CODE (context) == BLOCK)
9379 context = BLOCK_SUPERCONTEXT (context);
9380 else
9381 context = get_containing_scope (context);
9382 }
9383
9384 return context;
9385 }
9386
9387 /* Return the innermost context enclosing DECL that is
9388 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9389 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9390
9391 tree
9392 decl_type_context (const_tree decl)
9393 {
9394 tree context = DECL_CONTEXT (decl);
9395
9396 while (context)
9397 switch (TREE_CODE (context))
9398 {
9399 case NAMESPACE_DECL:
9400 case TRANSLATION_UNIT_DECL:
9401 return NULL_TREE;
9402
9403 case RECORD_TYPE:
9404 case UNION_TYPE:
9405 case QUAL_UNION_TYPE:
9406 return context;
9407
9408 case TYPE_DECL:
9409 case FUNCTION_DECL:
9410 context = DECL_CONTEXT (context);
9411 break;
9412
9413 case BLOCK:
9414 context = BLOCK_SUPERCONTEXT (context);
9415 break;
9416
9417 default:
9418 gcc_unreachable ();
9419 }
9420
9421 return NULL_TREE;
9422 }
9423
9424 /* CALL is a CALL_EXPR. Return the declaration for the function
9425 called, or NULL_TREE if the called function cannot be
9426 determined. */
9427
9428 tree
9429 get_callee_fndecl (const_tree call)
9430 {
9431 tree addr;
9432
9433 if (call == error_mark_node)
9434 return error_mark_node;
9435
9436 /* It's invalid to call this function with anything but a
9437 CALL_EXPR. */
9438 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9439
9440 /* The first operand to the CALL is the address of the function
9441 called. */
9442 addr = CALL_EXPR_FN (call);
9443
9444 /* If there is no function, return early. */
9445 if (addr == NULL_TREE)
9446 return NULL_TREE;
9447
9448 STRIP_NOPS (addr);
9449
9450 /* If this is a readonly function pointer, extract its initial value. */
9451 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9452 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9453 && DECL_INITIAL (addr))
9454 addr = DECL_INITIAL (addr);
9455
9456 /* If the address is just `&f' for some function `f', then we know
9457 that `f' is being called. */
9458 if (TREE_CODE (addr) == ADDR_EXPR
9459 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9460 return TREE_OPERAND (addr, 0);
9461
9462 /* We couldn't figure out what was being called. */
9463 return NULL_TREE;
9464 }
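
/* Usage sketch: for a direct call such as "memcpy (d, s, n)", the
   CALL_EXPR_FN operand is an ADDR_EXPR of the memcpy FUNCTION_DECL, so

     tree fndecl = get_callee_fndecl (call);

   returns that FUNCTION_DECL; for an indirect call through a non-constant
   function pointer it returns NULL_TREE instead.  */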
9465
9466 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9467 return the associated function code, otherwise return CFN_LAST. */
9468
9469 combined_fn
9470 get_call_combined_fn (const_tree call)
9471 {
9472 /* It's invalid to call this function with anything but a CALL_EXPR. */
9473 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9474
9475 if (!CALL_EXPR_FN (call))
9476 return as_combined_fn (CALL_EXPR_IFN (call));
9477
9478 tree fndecl = get_callee_fndecl (call);
9479 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9480 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9481
9482 return CFN_LAST;
9483 }
9484
9485 #define TREE_MEM_USAGE_SPACES 40
9486
9487 /* Print debugging information about tree nodes generated during the compile,
9488 and any language-specific information. */
9489
9490 void
9491 dump_tree_statistics (void)
9492 {
9493 if (GATHER_STATISTICS)
9494 {
9495 int i;
9496 int total_nodes, total_bytes;
9497 fprintf (stderr, "\nKind Nodes Bytes\n");
9498 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9499 total_nodes = total_bytes = 0;
9500 for (i = 0; i < (int) all_kinds; i++)
9501 {
9502 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9503 tree_node_counts[i], tree_node_sizes[i]);
9504 total_nodes += tree_node_counts[i];
9505 total_bytes += tree_node_sizes[i];
9506 }
9507 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9508 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9509 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9510 fprintf (stderr, "Code Nodes\n");
9511 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9512 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9513 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9514 tree_code_counts[i]);
9515 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9516 fprintf (stderr, "\n");
9517 ssanames_print_statistics ();
9518 fprintf (stderr, "\n");
9519 phinodes_print_statistics ();
9520 fprintf (stderr, "\n");
9521 }
9522 else
9523 fprintf (stderr, "(No per-node statistics)\n");
9524
9525 print_type_hash_statistics ();
9526 print_debug_expr_statistics ();
9527 print_value_expr_statistics ();
9528 lang_hooks.print_statistics ();
9529 }
9530 \f
9531 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9532
9533 /* Feed the high BITS bits of VALUE into the running crc32 checksum CHKSUM. */
9534
9535 static unsigned
9536 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9537 {
9538 unsigned ix;
9539
9540 for (ix = bits; ix--; value <<= 1)
9541 {
9542 unsigned feedback;
9543
9544 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9545 chksum <<= 1;
9546 chksum ^= feedback;
9547 }
9548 return chksum;
9549 }
9550
9551 /* Generate a crc32 of a 32-bit unsigned. */
9552
9553 unsigned
9554 crc32_unsigned (unsigned chksum, unsigned value)
9555 {
9556 return crc32_unsigned_bits (chksum, value, 32);
9557 }
9558
9559 /* Generate a crc32 of a byte. */
9560
9561 unsigned
9562 crc32_byte (unsigned chksum, char byte)
9563 {
9564 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9565 }
9566
9567 /* Generate a crc32 of a string. */
9568
9569 unsigned
9570 crc32_string (unsigned chksum, const char *string)
9571 {
9572 do
9573 {
9574 chksum = crc32_byte (chksum, *string);
9575 }
9576 while (*string++);
9577 return chksum;
9578 }
9579
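/* Editorial usage sketch, not part of the original source: the crc32
   helpers fold data into a running checksum, so calls can be chained,
   e.g. to mix a name and a 32-bit value into one checksum:

     unsigned chk = crc32_string (0, "some_name");
     chk = crc32_unsigned (chk, 0xdeadbeef);

   get_file_function_name below uses crc32_string this way when building
   a randomized, link-unique suffix.  */
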
9580 /* P is a string that will be used in a symbol. Mask out any characters
9581 that are not valid in that context. */
9582
9583 void
9584 clean_symbol_name (char *p)
9585 {
9586 for (; *p; p++)
9587 if (! (ISALNUM (*p)
9588 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9589 || *p == '$'
9590 #endif
9591 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9592 || *p == '.'
9593 #endif
9594 ))
9595 *p = '_';
9596 }
9597
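/* Editorial sketch, not part of the original source: clean_symbol_name
   rewrites P in place, turning every character that is not alphanumeric
   (or `$' / `.' where the target permits them in labels) into `_', e.g.

     char buf[] = "my-file.c";
     clean_symbol_name (buf);

   leaves "my_file.c" on targets that allow `.' in labels and "my_file_c"
   otherwise.  */
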
9598 /* For anonymous aggregate types, we need some sort of name to
9599 hold on to. In practice, this should not appear, but it should
9600 not be harmful if it does. */
9601 bool
9602 anon_aggrname_p (const_tree id_node)
9603 {
9604 #ifndef NO_DOT_IN_LABEL
9605 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9606 && IDENTIFIER_POINTER (id_node)[1] == '_');
9607 #else /* NO_DOT_IN_LABEL */
9608 #ifndef NO_DOLLAR_IN_LABEL
9609 return (IDENTIFIER_POINTER (id_node)[0] == '$'
9610 && IDENTIFIER_POINTER (id_node)[1] == '_');
9611 #else /* NO_DOLLAR_IN_LABEL */
9612 #define ANON_AGGRNAME_PREFIX "__anon_"
9613 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9614 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9615 #endif /* NO_DOLLAR_IN_LABEL */
9616 #endif /* NO_DOT_IN_LABEL */
9617 }
9618
9619 /* Return a format for an anonymous aggregate name. */
9620 const char *
9621 anon_aggrname_format ()
9622 {
9623 #ifndef NO_DOT_IN_LABEL
9624 return "._%d";
9625 #else /* NO_DOT_IN_LABEL */
9626 #ifndef NO_DOLLAR_IN_LABEL
9627 return "$_%d";
9628 #else /* NO_DOLLAR_IN_LABEL */
9629 return "__anon_%d";
9630 #endif /* NO_DOLLAR_IN_LABEL */
9631 #endif /* NO_DOT_IN_LABEL */
9632 }
9633
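/* Editorial note, not part of the original source: the two helpers above
   are kept in sync.  A front end creates an anonymous aggregate name by
   printing the format with a counter, and anon_aggrname_p later
   recognizes such names by their prefix, e.g.

     char buf[32];
     sprintf (buf, anon_aggrname_format (), 42);
     tree id = get_identifier (buf);
     gcc_checking_assert (anon_aggrname_p (id));

   (buf and the counter value 42 are illustrative only.)  */
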
9634 /* Generate a name for a special-purpose function.
9635 The generated name may need to be unique across the whole link.
9636 Changes to this function may also require corresponding changes to
9637 xstrdup_mask_random.
9638 TYPE is some string to identify the purpose of this function to the
9639 linker or collect2; it must start with an uppercase letter,
9640 one of:
9641 I - for constructors
9642 D - for destructors
9643 N - for C++ anonymous namespaces
9644 F - for DWARF unwind frame information. */
9645
9646 tree
9647 get_file_function_name (const char *type)
9648 {
9649 char *buf;
9650 const char *p;
9651 char *q;
9652
9653 /* If we already have a name we know to be unique, just use that. */
9654 if (first_global_object_name)
9655 p = q = ASTRDUP (first_global_object_name);
9656 /* If the target is handling the constructors/destructors, they
9657 will be local to this file and the name is only necessary for
9658 debugging purposes.
9659 We also assign sub_I and sub_D suffixes to constructors called from
9660 the global static constructors. These are always local. */
9661 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9662 || (strncmp (type, "sub_", 4) == 0
9663 && (type[4] == 'I' || type[4] == 'D')))
9664 {
9665 const char *file = main_input_filename;
9666 if (! file)
9667 file = LOCATION_FILE (input_location);
9668 /* Just use the file's basename, because the full pathname
9669 might be quite long. */
9670 p = q = ASTRDUP (lbasename (file));
9671 }
9672 else
9673 {
9674 /* Otherwise, the name must be unique across the entire link.
9675 We don't have anything that we know to be unique to this translation
9676 unit, so use what we do have and throw in some randomness. */
9677 unsigned len;
9678 const char *name = weak_global_object_name;
9679 const char *file = main_input_filename;
9680
9681 if (! name)
9682 name = "";
9683 if (! file)
9684 file = LOCATION_FILE (input_location);
9685
9686 len = strlen (file);
9687 q = (char *) alloca (9 + 17 + len + 1);
9688 memcpy (q, file, len + 1);
9689
9690 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9691 crc32_string (0, name), get_random_seed (false));
9692
9693 p = q;
9694 }
9695
9696 clean_symbol_name (q);
9697 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9698 + strlen (type));
9699
9700 /* Set up the name of the file-level functions we may need.
9701 Use a global object (which is already required to be unique over
9702 the program) rather than the file name (which imposes extra
9703 constraints). */
9704 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9705
9706 return get_identifier (buf);
9707 }
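
/* Editorial sketch, not part of the original source: the TYPE letter is
   what collect2 and the linker key on, so a static-constructor helper is
   typically named with

     tree id = get_file_function_name ("I");

   yielding an identifier of the form "_GLOBAL__I_..." built from either a
   known-unique global name or the cleaned source file basename.  */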
9708 \f
9709 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9710
9711 /* Complain that the tree code of NODE does not match the expected 0
9712 terminated list of trailing codes. The trailing code list can be
9713 empty, for a more vague error message. FILE, LINE, and FUNCTION
9714 are of the caller. */
9715
9716 void
9717 tree_check_failed (const_tree node, const char *file,
9718 int line, const char *function, ...)
9719 {
9720 va_list args;
9721 const char *buffer;
9722 unsigned length = 0;
9723 enum tree_code code;
9724
9725 va_start (args, function);
9726 while ((code = (enum tree_code) va_arg (args, int)))
9727 length += 4 + strlen (get_tree_code_name (code));
9728 va_end (args);
9729 if (length)
9730 {
9731 char *tmp;
9732 va_start (args, function);
9733 length += strlen ("expected ");
9734 buffer = tmp = (char *) alloca (length);
9735 length = 0;
9736 while ((code = (enum tree_code) va_arg (args, int)))
9737 {
9738 const char *prefix = length ? " or " : "expected ";
9739
9740 strcpy (tmp + length, prefix);
9741 length += strlen (prefix);
9742 strcpy (tmp + length, get_tree_code_name (code));
9743 length += strlen (get_tree_code_name (code));
9744 }
9745 va_end (args);
9746 }
9747 else
9748 buffer = "unexpected node";
9749
9750 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9751 buffer, get_tree_code_name (TREE_CODE (node)),
9752 function, trim_filename (file), line);
9753 }
9754
9755 /* Complain that the tree code of NODE does match the expected 0
9756 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9757 the caller. */
9758
9759 void
9760 tree_not_check_failed (const_tree node, const char *file,
9761 int line, const char *function, ...)
9762 {
9763 va_list args;
9764 char *buffer;
9765 unsigned length = 0;
9766 enum tree_code code;
9767
9768 va_start (args, function);
9769 while ((code = (enum tree_code) va_arg (args, int)))
9770 length += 4 + strlen (get_tree_code_name (code));
9771 va_end (args);
9772 va_start (args, function);
9773 buffer = (char *) alloca (length);
9774 length = 0;
9775 while ((code = (enum tree_code) va_arg (args, int)))
9776 {
9777 if (length)
9778 {
9779 strcpy (buffer + length, " or ");
9780 length += 4;
9781 }
9782 strcpy (buffer + length, get_tree_code_name (code));
9783 length += strlen (get_tree_code_name (code));
9784 }
9785 va_end (args);
9786
9787 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9788 buffer, get_tree_code_name (TREE_CODE (node)),
9789 function, trim_filename (file), line);
9790 }
9791
9792 /* Similar to tree_check_failed, except that we check for a class of tree
9793 code, given in CL. */
9794
9795 void
9796 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9797 const char *file, int line, const char *function)
9798 {
9799 internal_error
9800 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9801 TREE_CODE_CLASS_STRING (cl),
9802 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9803 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9804 }
9805
9806 /* Similar to tree_check_failed, except that instead of specifying a
9807 dozen codes, use the knowledge that they're all sequential. */
9808
9809 void
9810 tree_range_check_failed (const_tree node, const char *file, int line,
9811 const char *function, enum tree_code c1,
9812 enum tree_code c2)
9813 {
9814 char *buffer;
9815 unsigned length = 0;
9816 unsigned int c;
9817
9818 for (c = c1; c <= c2; ++c)
9819 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9820
9821 length += strlen ("expected ");
9822 buffer = (char *) alloca (length);
9823 length = 0;
9824
9825 for (c = c1; c <= c2; ++c)
9826 {
9827 const char *prefix = length ? " or " : "expected ";
9828
9829 strcpy (buffer + length, prefix);
9830 length += strlen (prefix);
9831 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9832 length += strlen (get_tree_code_name ((enum tree_code) c));
9833 }
9834
9835 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9836 buffer, get_tree_code_name (TREE_CODE (node)),
9837 function, trim_filename (file), line);
9838 }
9839
9840
9841 /* Similar to tree_check_failed, except that we check that a tree does
9842 not have the specified code, given in CL. */
9843
9844 void
9845 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9846 const char *file, int line, const char *function)
9847 {
9848 internal_error
9849 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9850 TREE_CODE_CLASS_STRING (cl),
9851 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9852 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9853 }
9854
9855
9856 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9857
9858 void
9859 omp_clause_check_failed (const_tree node, const char *file, int line,
9860 const char *function, enum omp_clause_code code)
9861 {
9862 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9863 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9864 function, trim_filename (file), line);
9865 }
9866
9867
9868 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9869
9870 void
9871 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9872 const char *function, enum omp_clause_code c1,
9873 enum omp_clause_code c2)
9874 {
9875 char *buffer;
9876 unsigned length = 0;
9877 unsigned int c;
9878
9879 for (c = c1; c <= c2; ++c)
9880 length += 4 + strlen (omp_clause_code_name[c]);
9881
9882 length += strlen ("expected ");
9883 buffer = (char *) alloca (length);
9884 length = 0;
9885
9886 for (c = c1; c <= c2; ++c)
9887 {
9888 const char *prefix = length ? " or " : "expected ";
9889
9890 strcpy (buffer + length, prefix);
9891 length += strlen (prefix);
9892 strcpy (buffer + length, omp_clause_code_name[c]);
9893 length += strlen (omp_clause_code_name[c]);
9894 }
9895
9896 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9897 buffer, omp_clause_code_name[TREE_CODE (node)],
9898 function, trim_filename (file), line);
9899 }
9900
9901
9902 #undef DEFTREESTRUCT
9903 #define DEFTREESTRUCT(VAL, NAME) NAME,
9904
9905 static const char *ts_enum_names[] = {
9906 #include "treestruct.def"
9907 };
9908 #undef DEFTREESTRUCT
9909
9910 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9911
9912 /* Similar to tree_class_check_failed, except that we check for
9913 whether CODE contains the tree structure identified by EN. */
9914
9915 void
9916 tree_contains_struct_check_failed (const_tree node,
9917 const enum tree_node_structure_enum en,
9918 const char *file, int line,
9919 const char *function)
9920 {
9921 internal_error
9922 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9923 TS_ENUM_NAME (en),
9924 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9925 }
9926
9927
9928 /* Similar to above, except that the check is for the bounds of a
9929 TREE_INT_CST's (dynamically sized) vector of elements. */
9930
9931 void
9932 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9933 const char *function)
9934 {
9935 internal_error
9936 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9937 idx + 1, len, function, trim_filename (file), line);
9938 }
9939
9940 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9941 (dynamically sized) vector. */
9942
9943 void
9944 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9945 const char *function)
9946 {
9947 internal_error
9948 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9949 idx + 1, len, function, trim_filename (file), line);
9950 }
9951
9952 /* Similar to above, except that the check is for the bounds of the operand
9953 vector of an expression node EXP. */
9954
9955 void
9956 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9957 int line, const char *function)
9958 {
9959 enum tree_code code = TREE_CODE (exp);
9960 internal_error
9961 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9962 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9963 function, trim_filename (file), line);
9964 }
9965
9966 /* Similar to above, except that the check is for the number of
9967 operands of an OMP_CLAUSE node. */
9968
9969 void
9970 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9971 int line, const char *function)
9972 {
9973 internal_error
9974 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9975 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9976 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9977 trim_filename (file), line);
9978 }
9979 #endif /* ENABLE_TREE_CHECKING */
9980 \f
9981 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9982 and mapped to the machine mode MODE. Initialize its fields and build
9983 the information necessary for debugging output. */
9984
9985 static tree
9986 make_vector_type (tree innertype, int nunits, machine_mode mode)
9987 {
9988 tree t;
9989 inchash::hash hstate;
9990 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9991
9992 t = make_node (VECTOR_TYPE);
9993 TREE_TYPE (t) = mv_innertype;
9994 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9995 SET_TYPE_MODE (t, mode);
9996
9997 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9998 SET_TYPE_STRUCTURAL_EQUALITY (t);
9999 else if ((TYPE_CANONICAL (mv_innertype) != innertype
10000 || mode != VOIDmode)
10001 && !VECTOR_BOOLEAN_TYPE_P (t))
10002 TYPE_CANONICAL (t)
10003 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10004
10005 layout_type (t);
10006
10007 hstate.add_wide_int (VECTOR_TYPE);
10008 hstate.add_wide_int (nunits);
10009 hstate.add_wide_int (mode);
10010 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
10011 t = type_hash_canon (hstate.end (), t);
10012
10013 /* We have built a main variant, based on the main variant of the
10014 inner type. Use it to build the variant we return. */
10015 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10016 && TREE_TYPE (t) != innertype)
10017 return build_type_attribute_qual_variant (t,
10018 TYPE_ATTRIBUTES (innertype),
10019 TYPE_QUALS (innertype));
10020
10021 return t;
10022 }
10023
10024 static tree
10025 make_or_reuse_type (unsigned size, int unsignedp)
10026 {
10027 int i;
10028
10029 if (size == INT_TYPE_SIZE)
10030 return unsignedp ? unsigned_type_node : integer_type_node;
10031 if (size == CHAR_TYPE_SIZE)
10032 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10033 if (size == SHORT_TYPE_SIZE)
10034 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10035 if (size == LONG_TYPE_SIZE)
10036 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10037 if (size == LONG_LONG_TYPE_SIZE)
10038 return (unsignedp ? long_long_unsigned_type_node
10039 : long_long_integer_type_node);
10040
10041 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10042 if (size == int_n_data[i].bitsize
10043 && int_n_enabled_p[i])
10044 return (unsignedp ? int_n_trees[i].unsigned_type
10045 : int_n_trees[i].signed_type);
10046
10047 if (unsignedp)
10048 return make_unsigned_type (size);
10049 else
10050 return make_signed_type (size);
10051 }
10052
10053 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10054
10055 static tree
10056 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10057 {
10058 if (satp)
10059 {
10060 if (size == SHORT_FRACT_TYPE_SIZE)
10061 return unsignedp ? sat_unsigned_short_fract_type_node
10062 : sat_short_fract_type_node;
10063 if (size == FRACT_TYPE_SIZE)
10064 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10065 if (size == LONG_FRACT_TYPE_SIZE)
10066 return unsignedp ? sat_unsigned_long_fract_type_node
10067 : sat_long_fract_type_node;
10068 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10069 return unsignedp ? sat_unsigned_long_long_fract_type_node
10070 : sat_long_long_fract_type_node;
10071 }
10072 else
10073 {
10074 if (size == SHORT_FRACT_TYPE_SIZE)
10075 return unsignedp ? unsigned_short_fract_type_node
10076 : short_fract_type_node;
10077 if (size == FRACT_TYPE_SIZE)
10078 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10079 if (size == LONG_FRACT_TYPE_SIZE)
10080 return unsignedp ? unsigned_long_fract_type_node
10081 : long_fract_type_node;
10082 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10083 return unsignedp ? unsigned_long_long_fract_type_node
10084 : long_long_fract_type_node;
10085 }
10086
10087 return make_fract_type (size, unsignedp, satp);
10088 }
10089
10090 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10091
10092 static tree
10093 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10094 {
10095 if (satp)
10096 {
10097 if (size == SHORT_ACCUM_TYPE_SIZE)
10098 return unsignedp ? sat_unsigned_short_accum_type_node
10099 : sat_short_accum_type_node;
10100 if (size == ACCUM_TYPE_SIZE)
10101 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10102 if (size == LONG_ACCUM_TYPE_SIZE)
10103 return unsignedp ? sat_unsigned_long_accum_type_node
10104 : sat_long_accum_type_node;
10105 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10106 return unsignedp ? sat_unsigned_long_long_accum_type_node
10107 : sat_long_long_accum_type_node;
10108 }
10109 else
10110 {
10111 if (size == SHORT_ACCUM_TYPE_SIZE)
10112 return unsignedp ? unsigned_short_accum_type_node
10113 : short_accum_type_node;
10114 if (size == ACCUM_TYPE_SIZE)
10115 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10116 if (size == LONG_ACCUM_TYPE_SIZE)
10117 return unsignedp ? unsigned_long_accum_type_node
10118 : long_accum_type_node;
10119 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10120 return unsignedp ? unsigned_long_long_accum_type_node
10121 : long_long_accum_type_node;
10122 }
10123
10124 return make_accum_type (size, unsignedp, satp);
10125 }
10126
10127
10128 /* Create an atomic variant node for TYPE. This routine is called
10129 during initialization of data types to create the 5 basic atomic
10130 types. The generic build_variant_type function requires these to
10131 already be set up in order to function properly, so it cannot be
10132 called from there. If ALIGN is non-zero, then ensure alignment is
10133 overridden to this value. */
10134
10135 static tree
10136 build_atomic_base (tree type, unsigned int align)
10137 {
10138 tree t;
10139
10140 /* Make sure it's not already registered. */
10141 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10142 return t;
10143
10144 t = build_variant_type_copy (type);
10145 set_type_quals (t, TYPE_QUAL_ATOMIC);
10146
10147 if (align)
10148 SET_TYPE_ALIGN (t, align);
10149
10150 return t;
10151 }
10152
10153 /* Create nodes for all integer types (and error_mark_node) using the sizes
10154 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10155
10156 void
10157 build_common_tree_nodes (bool signed_char)
10158 {
10159 int i;
10160
10161 error_mark_node = make_node (ERROR_MARK);
10162 TREE_TYPE (error_mark_node) = error_mark_node;
10163
10164 initialize_sizetypes ();
10165
10166 /* Define both `signed char' and `unsigned char'. */
10167 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10168 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10169 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10170 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10171
10172 /* Define `char', which is like either `signed char' or `unsigned char'
10173 but not the same as either. */
10174 char_type_node
10175 = (signed_char
10176 ? make_signed_type (CHAR_TYPE_SIZE)
10177 : make_unsigned_type (CHAR_TYPE_SIZE));
10178 TYPE_STRING_FLAG (char_type_node) = 1;
10179
10180 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10181 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10182 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10183 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10184 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10185 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10186 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10187 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10188
10189 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10190 {
10191 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10192 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10193 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10194 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10195
10196 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10197 && int_n_enabled_p[i])
10198 {
10199 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10200 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10201 }
10202 }
10203
10204 /* Define a boolean type. This type only represents boolean values but
10205 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10206 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10207 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10208 TYPE_PRECISION (boolean_type_node) = 1;
10209 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10210
10211 /* Define what type to use for size_t. */
10212 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10213 size_type_node = unsigned_type_node;
10214 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10215 size_type_node = long_unsigned_type_node;
10216 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10217 size_type_node = long_long_unsigned_type_node;
10218 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10219 size_type_node = short_unsigned_type_node;
10220 else
10221 {
10222 int i;
10223
10224 size_type_node = NULL_TREE;
10225 for (i = 0; i < NUM_INT_N_ENTS; i++)
10226 if (int_n_enabled_p[i])
10227 {
10228 char name[50];
10229 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10230
10231 if (strcmp (name, SIZE_TYPE) == 0)
10232 {
10233 size_type_node = int_n_trees[i].unsigned_type;
10234 }
10235 }
10236 if (size_type_node == NULL_TREE)
10237 gcc_unreachable ();
10238 }
10239
10240 /* Fill in the rest of the sized types. Reuse existing type nodes
10241 when possible. */
10242 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10243 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10244 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10245 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10246 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10247
10248 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10249 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10250 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10251 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10252 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10253
10254 /* Don't call build_qualified_type for atomics. That routine does
10255 special processing for atomics, and until they are initialized
10256 it's better not to make that call.
10257
10258 Check to see if there is a target override for atomic types. */
10259
10260 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10261 targetm.atomic_align_for_mode (QImode));
10262 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10263 targetm.atomic_align_for_mode (HImode));
10264 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10265 targetm.atomic_align_for_mode (SImode));
10266 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10267 targetm.atomic_align_for_mode (DImode));
10268 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10269 targetm.atomic_align_for_mode (TImode));
10270
10271 access_public_node = get_identifier ("public");
10272 access_protected_node = get_identifier ("protected");
10273 access_private_node = get_identifier ("private");
10274
10275 /* Define these next since types below may use them. */
10276 integer_zero_node = build_int_cst (integer_type_node, 0);
10277 integer_one_node = build_int_cst (integer_type_node, 1);
10278 integer_three_node = build_int_cst (integer_type_node, 3);
10279 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10280
10281 size_zero_node = size_int (0);
10282 size_one_node = size_int (1);
10283 bitsize_zero_node = bitsize_int (0);
10284 bitsize_one_node = bitsize_int (1);
10285 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10286
10287 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10288 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10289
10290 void_type_node = make_node (VOID_TYPE);
10291 layout_type (void_type_node);
10292
10293 pointer_bounds_type_node = targetm.chkp_bound_type ();
10294
10295 /* We are not going to have real types in C with less than byte alignment,
10296 so we might as well not have any types that claim to have it. */
10297 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10298 TYPE_USER_ALIGN (void_type_node) = 0;
10299
10300 void_node = make_node (VOID_CST);
10301 TREE_TYPE (void_node) = void_type_node;
10302
10303 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10304 layout_type (TREE_TYPE (null_pointer_node));
10305
10306 ptr_type_node = build_pointer_type (void_type_node);
10307 const_ptr_type_node
10308 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10309 fileptr_type_node = ptr_type_node;
10310
10311 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10312
10313 float_type_node = make_node (REAL_TYPE);
10314 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10315 layout_type (float_type_node);
10316
10317 double_type_node = make_node (REAL_TYPE);
10318 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10319 layout_type (double_type_node);
10320
10321 long_double_type_node = make_node (REAL_TYPE);
10322 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10323 layout_type (long_double_type_node);
10324
10325 float_ptr_type_node = build_pointer_type (float_type_node);
10326 double_ptr_type_node = build_pointer_type (double_type_node);
10327 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10328 integer_ptr_type_node = build_pointer_type (integer_type_node);
10329
10330 /* Fixed size integer types. */
10331 uint16_type_node = make_or_reuse_type (16, 1);
10332 uint32_type_node = make_or_reuse_type (32, 1);
10333 uint64_type_node = make_or_reuse_type (64, 1);
10334
10335 /* Decimal float types. */
10336 dfloat32_type_node = make_node (REAL_TYPE);
10337 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10338 layout_type (dfloat32_type_node);
10339 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10340 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10341
10342 dfloat64_type_node = make_node (REAL_TYPE);
10343 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10344 layout_type (dfloat64_type_node);
10345 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10346 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10347
10348 dfloat128_type_node = make_node (REAL_TYPE);
10349 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10350 layout_type (dfloat128_type_node);
10351 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10352 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10353
10354 complex_integer_type_node = build_complex_type (integer_type_node);
10355 complex_float_type_node = build_complex_type (float_type_node);
10356 complex_double_type_node = build_complex_type (double_type_node);
10357 complex_long_double_type_node = build_complex_type (long_double_type_node);
10358
10359 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10360 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10361 sat_ ## KIND ## _type_node = \
10362 make_sat_signed_ ## KIND ## _type (SIZE); \
10363 sat_unsigned_ ## KIND ## _type_node = \
10364 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10365 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10366 unsigned_ ## KIND ## _type_node = \
10367 make_unsigned_ ## KIND ## _type (SIZE);
10368
10369 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10370 sat_ ## WIDTH ## KIND ## _type_node = \
10371 make_sat_signed_ ## KIND ## _type (SIZE); \
10372 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10373 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10374 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10375 unsigned_ ## WIDTH ## KIND ## _type_node = \
10376 make_unsigned_ ## KIND ## _type (SIZE);
10377
10378 /* Make fixed-point type nodes based on four different widths. */
10379 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10380 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10381 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10382 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10383 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10384
10385 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10386 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10387 NAME ## _type_node = \
10388 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10389 u ## NAME ## _type_node = \
10390 make_or_reuse_unsigned_ ## KIND ## _type \
10391 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10392 sat_ ## NAME ## _type_node = \
10393 make_or_reuse_sat_signed_ ## KIND ## _type \
10394 (GET_MODE_BITSIZE (MODE ## mode)); \
10395 sat_u ## NAME ## _type_node = \
10396 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10397 (GET_MODE_BITSIZE (U ## MODE ## mode));
10398
10399 /* Fixed-point type and mode nodes. */
10400 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10401 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10402 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10403 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10404 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10405 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10406 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10407 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10408 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10409 MAKE_FIXED_MODE_NODE (accum, da, DA)
10410 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10411
10412 {
10413 tree t = targetm.build_builtin_va_list ();
10414
10415 /* Many back-ends define record types without setting TYPE_NAME.
10416 If we copied the record type here, we'd keep the original
10417 record type without a name. This breaks name mangling. So,
10418 don't copy record types and let c_common_nodes_and_builtins()
10419 declare the type to be __builtin_va_list. */
10420 if (TREE_CODE (t) != RECORD_TYPE)
10421 t = build_variant_type_copy (t);
10422
10423 va_list_type_node = t;
10424 }
10425 }
10426
10427 /* Modify DECL for given flags.
10428 TM_PURE attribute is set only on types, so the function will modify
10429 DECL's type when ECF_TM_PURE is used. */
10430
10431 void
10432 set_call_expr_flags (tree decl, int flags)
10433 {
10434 if (flags & ECF_NOTHROW)
10435 TREE_NOTHROW (decl) = 1;
10436 if (flags & ECF_CONST)
10437 TREE_READONLY (decl) = 1;
10438 if (flags & ECF_PURE)
10439 DECL_PURE_P (decl) = 1;
10440 if (flags & ECF_LOOPING_CONST_OR_PURE)
10441 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10442 if (flags & ECF_NOVOPS)
10443 DECL_IS_NOVOPS (decl) = 1;
10444 if (flags & ECF_NORETURN)
10445 TREE_THIS_VOLATILE (decl) = 1;
10446 if (flags & ECF_MALLOC)
10447 DECL_IS_MALLOC (decl) = 1;
10448 if (flags & ECF_RETURNS_TWICE)
10449 DECL_IS_RETURNS_TWICE (decl) = 1;
10450 if (flags & ECF_LEAF)
10451 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10452 NULL, DECL_ATTRIBUTES (decl));
10453 if (flags & ECF_RET1)
10454 DECL_ATTRIBUTES (decl)
10455 = tree_cons (get_identifier ("fn spec"),
10456 build_tree_list (NULL_TREE, build_string (1, "1")),
10457 DECL_ATTRIBUTES (decl));
10458 if ((flags & ECF_TM_PURE) && flag_tm)
10459 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10460 /* Looping const or pure is implied by noreturn.
10461 There is currently no way to declare looping const or looping pure alone. */
10462 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10463 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10464 }
10465
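/* Editorial usage sketch, not part of the original source:
   local_define_builtin below funnels its ECF_* flags through this
   function, e.g. marking a decl as nothrow and const:

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_CONST);

   which sets TREE_NOTHROW and TREE_READONLY on DECL.  */
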
10466
10467 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10468
10469 static void
10470 local_define_builtin (const char *name, tree type, enum built_in_function code,
10471 const char *library_name, int ecf_flags)
10472 {
10473 tree decl;
10474
10475 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10476 library_name, NULL_TREE);
10477 set_call_expr_flags (decl, ecf_flags);
10478
10479 set_builtin_decl (code, decl, true);
10480 }
10481
10482 /* Call this function after instantiating all builtins that the language
10483 front end cares about. This will build the rest of the builtins
10484 and internal functions that are relied upon by the tree optimizers and
10485 the middle-end. */
10486
10487 void
10488 build_common_builtin_nodes (void)
10489 {
10490 tree tmp, ftype;
10491 int ecf_flags;
10492
10493 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10494 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10495 {
10496 ftype = build_function_type (void_type_node, void_list_node);
10497 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10498 local_define_builtin ("__builtin_unreachable", ftype,
10499 BUILT_IN_UNREACHABLE,
10500 "__builtin_unreachable",
10501 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10502 | ECF_CONST);
10503 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10504 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10505 "abort",
10506 ECF_LEAF | ECF_NORETURN | ECF_CONST);
10507 }
10508
10509 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10510 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10511 {
10512 ftype = build_function_type_list (ptr_type_node,
10513 ptr_type_node, const_ptr_type_node,
10514 size_type_node, NULL_TREE);
10515
10516 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10517 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10518 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10519 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10520 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10521 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10522 }
10523
10524 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10525 {
10526 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10527 const_ptr_type_node, size_type_node,
10528 NULL_TREE);
10529 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10530 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10531 }
10532
10533 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10534 {
10535 ftype = build_function_type_list (ptr_type_node,
10536 ptr_type_node, integer_type_node,
10537 size_type_node, NULL_TREE);
10538 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10539 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10540 }
10541
10542 /* If we're checking the stack, `alloca' can throw. */
10543 const int alloca_flags
10544 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10545
10546 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10547 {
10548 ftype = build_function_type_list (ptr_type_node,
10549 size_type_node, NULL_TREE);
10550 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10551 "alloca", alloca_flags);
10552 }
10553
10554 ftype = build_function_type_list (ptr_type_node, size_type_node,
10555 size_type_node, NULL_TREE);
10556 local_define_builtin ("__builtin_alloca_with_align", ftype,
10557 BUILT_IN_ALLOCA_WITH_ALIGN,
10558 "__builtin_alloca_with_align",
10559 alloca_flags);
10560
10561 ftype = build_function_type_list (void_type_node,
10562 ptr_type_node, ptr_type_node,
10563 ptr_type_node, NULL_TREE);
10564 local_define_builtin ("__builtin_init_trampoline", ftype,
10565 BUILT_IN_INIT_TRAMPOLINE,
10566 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10567 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10568 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10569 "__builtin_init_heap_trampoline",
10570 ECF_NOTHROW | ECF_LEAF);
10571
10572 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10573 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10574 BUILT_IN_ADJUST_TRAMPOLINE,
10575 "__builtin_adjust_trampoline",
10576 ECF_CONST | ECF_NOTHROW);
10577
10578 ftype = build_function_type_list (void_type_node,
10579 ptr_type_node, ptr_type_node, NULL_TREE);
10580 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10581 BUILT_IN_NONLOCAL_GOTO,
10582 "__builtin_nonlocal_goto",
10583 ECF_NORETURN | ECF_NOTHROW);
10584
10585 ftype = build_function_type_list (void_type_node,
10586 ptr_type_node, ptr_type_node, NULL_TREE);
10587 local_define_builtin ("__builtin_setjmp_setup", ftype,
10588 BUILT_IN_SETJMP_SETUP,
10589 "__builtin_setjmp_setup", ECF_NOTHROW);
10590
10591 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10592 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10593 BUILT_IN_SETJMP_RECEIVER,
10594 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10595
10596 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10597 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10598 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10599
10600 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10601 local_define_builtin ("__builtin_stack_restore", ftype,
10602 BUILT_IN_STACK_RESTORE,
10603 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10604
10605 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10606 const_ptr_type_node, size_type_node,
10607 NULL_TREE);
10608 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10609 "__builtin_memcmp_eq",
10610 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10611
10612 /* If there's a possibility that we might use the ARM EABI, build the
10613 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10614 if (targetm.arm_eabi_unwinder)
10615 {
10616 ftype = build_function_type_list (void_type_node, NULL_TREE);
10617 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10618 BUILT_IN_CXA_END_CLEANUP,
10619 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10620 }
10621
10622 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10623 local_define_builtin ("__builtin_unwind_resume", ftype,
10624 BUILT_IN_UNWIND_RESUME,
10625 ((targetm_common.except_unwind_info (&global_options)
10626 == UI_SJLJ)
10627 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10628 ECF_NORETURN);
10629
10630 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10631 {
10632 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10633 NULL_TREE);
10634 local_define_builtin ("__builtin_return_address", ftype,
10635 BUILT_IN_RETURN_ADDRESS,
10636 "__builtin_return_address",
10637 ECF_NOTHROW);
10638 }
10639
10640 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10641 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10642 {
10643 ftype = build_function_type_list (void_type_node, ptr_type_node,
10644 ptr_type_node, NULL_TREE);
10645 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10646 local_define_builtin ("__cyg_profile_func_enter", ftype,
10647 BUILT_IN_PROFILE_FUNC_ENTER,
10648 "__cyg_profile_func_enter", 0);
10649 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10650 local_define_builtin ("__cyg_profile_func_exit", ftype,
10651 BUILT_IN_PROFILE_FUNC_EXIT,
10652 "__cyg_profile_func_exit", 0);
10653 }
10654
10655 /* The exception object and filter values from the runtime. The argument
10656 must be zero before exception lowering, i.e. from the front end. After
10657 exception lowering, it will be the region number for the exception
10658 landing pad. These functions are PURE instead of CONST to prevent
10659 them from being hoisted past the exception edge that will initialize
10660 its value in the landing pad. */
10661 ftype = build_function_type_list (ptr_type_node,
10662 integer_type_node, NULL_TREE);
10663 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10664 /* Only use TM_PURE if we have TM language support. */
10665 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10666 ecf_flags |= ECF_TM_PURE;
10667 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10668 "__builtin_eh_pointer", ecf_flags);
10669
10670 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10671 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10672 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10673 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10674
10675 ftype = build_function_type_list (void_type_node,
10676 integer_type_node, integer_type_node,
10677 NULL_TREE);
10678 local_define_builtin ("__builtin_eh_copy_values", ftype,
10679 BUILT_IN_EH_COPY_VALUES,
10680 "__builtin_eh_copy_values", ECF_NOTHROW);
10681
10682 /* Complex multiplication and division. These are handled as builtins
10683 rather than optabs because emit_library_call_value doesn't support
10684 complex. Further, we can do slightly better with folding these
10685 beasties if the real and imaginary parts of the arguments are separate. */
10686 {
10687 int mode;
10688
10689 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10690 {
10691 char mode_name_buf[4], *q;
10692 const char *p;
10693 enum built_in_function mcode, dcode;
10694 tree type, inner_type;
10695 const char *prefix = "__";
10696
10697 if (targetm.libfunc_gnu_prefix)
10698 prefix = "__gnu_";
10699
10700 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10701 if (type == NULL)
10702 continue;
10703 inner_type = TREE_TYPE (type);
10704
10705 ftype = build_function_type_list (type, inner_type, inner_type,
10706 inner_type, inner_type, NULL_TREE);
10707
10708 mcode = ((enum built_in_function)
10709 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10710 dcode = ((enum built_in_function)
10711 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10712
10713 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10714 *q = TOLOWER (*p);
10715 *q = '\0';
10716
10717 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10718 NULL);
10719 local_define_builtin (built_in_names[mcode], ftype, mcode,
10720 built_in_names[mcode],
10721 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10722
10723 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10724 NULL);
10725 local_define_builtin (built_in_names[dcode], ftype, dcode,
10726 built_in_names[dcode],
10727 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10728 }
10729 }
10730
10731 init_internal_fns ();
10732 }
10733
10734 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10735 better way.
10736
10737 If we requested a pointer to a vector, build up the pointers that
10738 we stripped off while looking for the inner type. Similarly for
10739 return values from functions.
10740
10741 The argument TYPE is the top of the chain, and BOTTOM is the
10742 new type which we will point to. */
10743
10744 tree
10745 reconstruct_complex_type (tree type, tree bottom)
10746 {
10747 tree inner, outer;
10748
10749 if (TREE_CODE (type) == POINTER_TYPE)
10750 {
10751 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10752 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10753 TYPE_REF_CAN_ALIAS_ALL (type));
10754 }
10755 else if (TREE_CODE (type) == REFERENCE_TYPE)
10756 {
10757 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10758 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10759 TYPE_REF_CAN_ALIAS_ALL (type));
10760 }
10761 else if (TREE_CODE (type) == ARRAY_TYPE)
10762 {
10763 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10764 outer = build_array_type (inner, TYPE_DOMAIN (type));
10765 }
10766 else if (TREE_CODE (type) == FUNCTION_TYPE)
10767 {
10768 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10769 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10770 }
10771 else if (TREE_CODE (type) == METHOD_TYPE)
10772 {
10773 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10774 /* The build_method_type_directly() routine prepends 'this' to the argument
10775 list, so we must compensate by getting rid of it. */
10776 outer
10777 = build_method_type_directly
10778 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10779 inner,
10780 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10781 }
10782 else if (TREE_CODE (type) == OFFSET_TYPE)
10783 {
10784 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10785 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10786 }
10787 else
10788 return bottom;
10789
10790 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10791 TYPE_QUALS (type));
10792 }
10793
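/* Editorial sketch, not part of the original source: given a wrapper type
   such as a pointer and a new innermost type BOTTOM, the function rebuilds
   the wrappers around BOTTOM, e.g.

     tree v4sf = build_vector_type (float_type_node, 4);
     tree p = reconstruct_complex_type (float_ptr_type_node, v4sf);

   produces a pointer to the 4-element float vector rather than a pointer
   to plain float.  */
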
10794 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10795 the inner type. */
10796 tree
10797 build_vector_type_for_mode (tree innertype, machine_mode mode)
10798 {
10799 int nunits;
10800
10801 switch (GET_MODE_CLASS (mode))
10802 {
10803 case MODE_VECTOR_INT:
10804 case MODE_VECTOR_FLOAT:
10805 case MODE_VECTOR_FRACT:
10806 case MODE_VECTOR_UFRACT:
10807 case MODE_VECTOR_ACCUM:
10808 case MODE_VECTOR_UACCUM:
10809 nunits = GET_MODE_NUNITS (mode);
10810 break;
10811
10812 case MODE_INT:
10813 /* Check that there are no leftover bits. */
10814 gcc_assert (GET_MODE_BITSIZE (mode)
10815 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10816
10817 nunits = GET_MODE_BITSIZE (mode)
10818 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10819 break;
10820
10821 default:
10822 gcc_unreachable ();
10823 }
10824
10825 return make_vector_type (innertype, nunits, mode);
10826 }
10827
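/* Editorial worked example, not part of the original source: for an
   integer mode the number of units is derived from the sizes alone.
   With a 64-bit MODE_INT mode and a 16-bit inner type,

     nunits = 64 / 16 = 4

   so the result is a 4-element vector mapped onto that integer mode.  */
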
10828 /* Similarly, but takes the inner type and number of units, which must be
10829 a power of two. */
10830
10831 tree
10832 build_vector_type (tree innertype, int nunits)
10833 {
10834 return make_vector_type (innertype, nunits, VOIDmode);
10835 }
10836
10837 /* Build a truth vector type with NUNITS units; VECTOR_SIZE is in bytes. */
10838
10839 tree
10840 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10841 {
10842 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10843 vector_size);
10844
10845 gcc_assert (mask_mode != VOIDmode);
10846
10847 unsigned HOST_WIDE_INT vsize;
10848 if (mask_mode == BLKmode)
10849 vsize = vector_size * BITS_PER_UNIT;
10850 else
10851 vsize = GET_MODE_BITSIZE (mask_mode);
10852
10853 unsigned HOST_WIDE_INT esize = vsize / nunits;
10854 gcc_assert (esize * nunits == vsize);
10855
10856 tree bool_type = build_nonstandard_boolean_type (esize);
10857
10858 return make_vector_type (bool_type, nunits, mask_mode);
10859 }
10860
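/* Editorial worked example, not part of the original source: when the
   target has no dedicated mask mode (BLKmode), the booleans occupy the
   full vector, e.g. for NUNITS = 8 and VECTOR_SIZE = 16 bytes with
   8-bit units

     vsize = 16 * 8 = 128, esize = 128 / 8 = 16

   so each element is a 16-bit boolean, as in an integer vector compare.  */
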
10861 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10862
10863 tree
10864 build_same_sized_truth_vector_type (tree vectype)
10865 {
10866 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10867 return vectype;
10868
10869 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10870
10871 if (!size)
10872 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10873
10874 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10875 }
10876
10877 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10878
10879 tree
10880 build_opaque_vector_type (tree innertype, int nunits)
10881 {
10882 tree t = make_vector_type (innertype, nunits, VOIDmode);
10883 tree cand;
10884 /* We always build the non-opaque variant before the opaque one,
10885 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10886 cand = TYPE_NEXT_VARIANT (t);
10887 if (cand
10888 && TYPE_VECTOR_OPAQUE (cand)
10889 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10890 return cand;
10891 /* Otherwise build a variant type and make sure to queue it after
10892 the non-opaque type. */
10893 cand = build_distinct_type_copy (t);
10894 TYPE_VECTOR_OPAQUE (cand) = true;
10895 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10896 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10897 TYPE_NEXT_VARIANT (t) = cand;
10898 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10899 return cand;
10900 }
10901
10902
10903 /* Given an initializer INIT, return TRUE if INIT is zero or some
10904 aggregate of zeros. Otherwise return FALSE. */
10905 bool
10906 initializer_zerop (const_tree init)
10907 {
10908 tree elt;
10909
10910 STRIP_NOPS (init);
10911
10912 switch (TREE_CODE (init))
10913 {
10914 case INTEGER_CST:
10915 return integer_zerop (init);
10916
10917 case REAL_CST:
10918 /* ??? Note that this is not correct for C4X float formats. There,
10919 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10920 negative exponent. */
10921 return real_zerop (init)
10922 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10923
10924 case FIXED_CST:
10925 return fixed_zerop (init);
10926
10927 case COMPLEX_CST:
10928 return integer_zerop (init)
10929 || (real_zerop (init)
10930 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10931 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10932
10933 case VECTOR_CST:
10934 {
10935 unsigned i;
10936 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10937 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10938 return false;
10939 return true;
10940 }
10941
10942 case CONSTRUCTOR:
10943 {
10944 unsigned HOST_WIDE_INT idx;
10945
10946 if (TREE_CLOBBER_P (init))
10947 return false;
10948 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10949 if (!initializer_zerop (elt))
10950 return false;
10951 return true;
10952 }
10953
10954 case STRING_CST:
10955 {
10956 int i;
10957
10958 /* We need to loop through all elements to handle cases like
10959 "\0" and "\0foobar". */
10960 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10961 if (TREE_STRING_POINTER (init)[i] != '\0')
10962 return false;
10963
10964 return true;
10965 }
10966
10967 default:
10968 return false;
10969 }
10970 }
10971
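/* Editorial sketch, not part of the original source: examples of
   initializers this predicate accepts and rejects for a C front end:

     int  a    = 0;             accepted (INTEGER_CST zero)
     double d  = 0.0;           accepted, but -0.0 is rejected
     int  v[4] = { 0, 0 };      accepted (CONSTRUCTOR of zeros)
     char s[4] = "\0a";         rejected, the 'a' is nonzero  */
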
10972 /* Check whether vector VEC consists entirely of equal elements and
10973 that the number of elements corresponds to the type of VEC.
10974 The function returns the first element of the vector,
10975 or NULL_TREE if the vector is not uniform. */
10976 tree
10977 uniform_vector_p (const_tree vec)
10978 {
10979 tree first, t;
10980 unsigned i;
10981
10982 if (vec == NULL_TREE)
10983 return NULL_TREE;
10984
10985 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10986
10987 if (TREE_CODE (vec) == VECTOR_CST)
10988 {
10989 first = VECTOR_CST_ELT (vec, 0);
10990 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10991 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10992 return NULL_TREE;
10993
10994 return first;
10995 }
10996
10997 else if (TREE_CODE (vec) == CONSTRUCTOR)
10998 {
10999 first = error_mark_node;
11000
11001 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11002 {
11003 if (i == 0)
11004 {
11005 first = t;
11006 continue;
11007 }
11008 if (!operand_equal_p (first, t, 0))
11009 return NULL_TREE;
11010 }
11011 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
11012 return NULL_TREE;
11013
11014 return first;
11015 }
11016
11017 return NULL_TREE;
11018 }
11019
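/* Editorial sketch, not part of the original source: a VECTOR_CST such as
   { 3, 3, 3, 3 } yields its first element (the INTEGER_CST 3), while
   { 3, 3, 3, 7 } yields NULL_TREE.  A CONSTRUCTOR is additionally required
   to supply one value per element of the vector type, since any missing
   trailing elements would be implicit zeros and might differ from the
   explicit ones.  */
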
11020 /* Build an empty statement at location LOC. */
11021
11022 tree
11023 build_empty_stmt (location_t loc)
11024 {
11025 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11026 SET_EXPR_LOCATION (t, loc);
11027 return t;
11028 }
11029
11030
11031 /* Build an OpenMP clause with code CODE. LOC is the location of the
11032 clause. */
11033
11034 tree
11035 build_omp_clause (location_t loc, enum omp_clause_code code)
11036 {
11037 tree t;
11038 int size, length;
11039
11040 length = omp_clause_num_ops[code];
11041 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11042
11043 record_node_allocation_statistics (OMP_CLAUSE, size);
11044
11045 t = (tree) ggc_internal_alloc (size);
11046 memset (t, 0, size);
11047 TREE_SET_CODE (t, OMP_CLAUSE);
11048 OMP_CLAUSE_SET_CODE (t, code);
11049 OMP_CLAUSE_LOCATION (t) = loc;
11050
11051 return t;
11052 }
11053
11054 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11055 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11056 Except for the CODE and operand count field, other storage for the
11057 object is initialized to zeros. */
11058
11059 tree
11060 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
11061 {
11062 tree t;
11063 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11064
11065 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11066 gcc_assert (len >= 1);
11067
11068 record_node_allocation_statistics (code, length);
11069
11070 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11071
11072 TREE_SET_CODE (t, code);
11073
11074 /* Can't use TREE_OPERAND to store the length because if checking is
11075 enabled, it will try to check the length before we store it. :-P */
11076 t->exp.operands[0] = build_int_cst (sizetype, len);
11077
11078 return t;
11079 }
11080
11081 /* Helper function for build_call_* functions; build a CALL_EXPR with
11082 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11083 the argument slots. */
11084
11085 static tree
11086 build_call_1 (tree return_type, tree fn, int nargs)
11087 {
11088 tree t;
11089
11090 t = build_vl_exp (CALL_EXPR, nargs + 3);
11091 TREE_TYPE (t) = return_type;
11092 CALL_EXPR_FN (t) = fn;
11093 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11094
11095 return t;
11096 }
11097
11098 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11099 FN and a null static chain slot. NARGS is the number of call arguments
11100 which are specified as "..." arguments. */
11101
11102 tree
11103 build_call_nary (tree return_type, tree fn, int nargs, ...)
11104 {
11105 tree ret;
11106 va_list args;
11107 va_start (args, nargs);
11108 ret = build_call_valist (return_type, fn, nargs, args);
11109 va_end (args);
11110 return ret;
11111 }
11112
11113 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11114 FN and a null static chain slot. NARGS is the number of call arguments
11115 which are specified as a va_list ARGS. */
11116
11117 tree
11118 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11119 {
11120 tree t;
11121 int i;
11122
11123 t = build_call_1 (return_type, fn, nargs);
11124 for (i = 0; i < nargs; i++)
11125 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11126 process_call_operands (t);
11127 return t;
11128 }
11129
11130 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11131 FN and a null static chain slot. NARGS is the number of call arguments
11132 which are specified as a tree array ARGS. */
11133
11134 tree
11135 build_call_array_loc (location_t loc, tree return_type, tree fn,
11136 int nargs, const tree *args)
11137 {
11138 tree t;
11139 int i;
11140
11141 t = build_call_1 (return_type, fn, nargs);
11142 for (i = 0; i < nargs; i++)
11143 CALL_EXPR_ARG (t, i) = args[i];
11144 process_call_operands (t);
11145 SET_EXPR_LOCATION (t, loc);
11146 return t;
11147 }
11148
11149 /* Like build_call_array, but takes a vec. */
11150
11151 tree
11152 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11153 {
11154 tree ret, t;
11155 unsigned int ix;
11156
11157 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11158 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11159 CALL_EXPR_ARG (ret, ix) = t;
11160 process_call_operands (ret);
11161 return ret;
11162 }
11163
11164 /* Conveniently construct a function call expression. FNDECL names the
11165 function to be called and N arguments are passed in the array
11166 ARGARRAY. */
11167
11168 tree
11169 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11170 {
11171 tree fntype = TREE_TYPE (fndecl);
11172 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11173
11174 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11175 }
11176
11177 /* Conveniently construct a function call expression. FNDECL names the
11178 function to be called and the arguments are passed in the vector
11179 VEC. */
11180
11181 tree
11182 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11183 {
11184 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11185 vec_safe_address (vec));
11186 }
11187
11188
11189 /* Conveniently construct a function call expression. FNDECL names the
11190 function to be called, N is the number of arguments, and the "..."
11191 parameters are the argument expressions. */
11192
11193 tree
11194 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11195 {
11196 va_list ap;
11197 tree *argarray = XALLOCAVEC (tree, n);
11198 int i;
11199
11200 va_start (ap, n);
11201 for (i = 0; i < n; i++)
11202 argarray[i] = va_arg (ap, tree);
11203 va_end (ap);
11204 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11205 }
11206
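/* Usage sketch (illustrative only; FNDECL, ARG0, ARG1 and LOC are
   hypothetical): for a FUNCTION_DECL taking two arguments whose types
   match ARG0 and ARG1, a call expression could be built as

     tree call = build_call_expr_loc (loc, fndecl, 2, arg0, arg1);  */
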
11207 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11208 varargs macros aren't supported by all bootstrap compilers. */
11209
11210 tree
11211 build_call_expr (tree fndecl, int n, ...)
11212 {
11213 va_list ap;
11214 tree *argarray = XALLOCAVEC (tree, n);
11215 int i;
11216
11217 va_start (ap, n);
11218 for (i = 0; i < n; i++)
11219 argarray[i] = va_arg (ap, tree);
11220 va_end (ap);
11221 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11222 }
11223
11224 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11225 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11226 It will be gimplified later into an ordinary internal function call. */
11227
11228 tree
11229 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11230 tree type, int n, const tree *args)
11231 {
11232 tree t = build_call_1 (type, NULL_TREE, n);
11233 for (int i = 0; i < n; ++i)
11234 CALL_EXPR_ARG (t, i) = args[i];
11235 SET_EXPR_LOCATION (t, loc);
11236 CALL_EXPR_IFN (t) = ifn;
11237 return t;
11238 }
11239
11240 /* Build an internal call expression. This is just like CALL_EXPR, except
11241 its CALL_EXPR_FN is NULL. It will be gimplified later into an ordinary
11242 internal function call. */
11243
11244 tree
11245 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11246 tree type, int n, ...)
11247 {
11248 va_list ap;
11249 tree *argarray = XALLOCAVEC (tree, n);
11250 int i;
11251
11252 va_start (ap, n);
11253 for (i = 0; i < n; i++)
11254 argarray[i] = va_arg (ap, tree);
11255 va_end (ap);
11256 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11257 }
11258
11259 /* Return a function call to FN, if the target is guaranteed to support it,
11260 or null otherwise.
11261
11262 N is the number of arguments, passed in the "...", and TYPE is the
11263 type of the return value. */
11264
11265 tree
11266 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11267 int n, ...)
11268 {
11269 va_list ap;
11270 tree *argarray = XALLOCAVEC (tree, n);
11271 int i;
11272
11273 va_start (ap, n);
11274 for (i = 0; i < n; i++)
11275 argarray[i] = va_arg (ap, tree);
11276 va_end (ap);
11277 if (internal_fn_p (fn))
11278 {
11279 internal_fn ifn = as_internal_fn (fn);
11280 if (direct_internal_fn_p (ifn))
11281 {
11282 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11283 if (!direct_internal_fn_supported_p (ifn, types,
11284 OPTIMIZE_FOR_BOTH))
11285 return NULL_TREE;
11286 }
11287 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11288 }
11289 else
11290 {
11291 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11292 if (!fndecl)
11293 return NULL_TREE;
11294 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11295 }
11296 }
11297
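/* Usage sketch (illustrative only; FN, TYPE and ARG are hypothetical):
   callers typically treat a NULL_TREE result as "emit a fallback instead",
   e.g.

     tree call = maybe_build_call_expr_loc (loc, fn, type, 1, arg);
     if (!call)
       ... fall back to an out-of-line or expanded implementation ...

   where FN is some combined_fn value and ARG an operand appropriate
   for it.  */
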
11298 /* Create a new constant string literal and return a char* pointer to it.
11299 The STRING_CST value is the LEN characters at STR. */
11300 tree
11301 build_string_literal (int len, const char *str)
11302 {
11303 tree t, elem, index, type;
11304
11305 t = build_string (len, str);
11306 elem = build_type_variant (char_type_node, 1, 0);
11307 index = build_index_type (size_int (len - 1));
11308 type = build_array_type (elem, index);
11309 TREE_TYPE (t) = type;
11310 TREE_CONSTANT (t) = 1;
11311 TREE_READONLY (t) = 1;
11312 TREE_STATIC (t) = 1;
11313
11314 type = build_pointer_type (elem);
11315 t = build1 (ADDR_EXPR, type,
11316 build4 (ARRAY_REF, elem,
11317 t, integer_zero_node, NULL_TREE, NULL_TREE));
11318 return t;
11319 }
11320
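/* Usage sketch (illustrative only): for a NUL-terminated C string the
   terminating NUL is normally counted in LEN, e.g.

     tree p = build_string_literal (strlen ("hi") + 1, "hi");

   which yields the address of the first element of a 3-character constant
   array holding "hi" and its terminating NUL.  */
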
11321
11322
11323 /* Return true if T (assumed to be a DECL) must be assigned a memory
11324 location. */
11325
11326 bool
11327 needs_to_live_in_memory (const_tree t)
11328 {
11329 return (TREE_ADDRESSABLE (t)
11330 || is_global_var (t)
11331 || (TREE_CODE (t) == RESULT_DECL
11332 && !DECL_BY_REFERENCE (t)
11333 && aggregate_value_p (t, current_function_decl)));
11334 }
11335
11336 /* Return the value of the constant X, sign-extended to a HOST_WIDE_INT. */
11337
11338 HOST_WIDE_INT
11339 int_cst_value (const_tree x)
11340 {
11341 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11342 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11343
11344 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11345 gcc_assert (cst_and_fits_in_hwi (x));
11346
11347 if (bits < HOST_BITS_PER_WIDE_INT)
11348 {
11349 bool negative = ((val >> (bits - 1)) & 1) != 0;
11350 if (negative)
11351 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11352 else
11353 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11354 }
11355
11356 return val;
11357 }
11358
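/* Worked example (illustrative only): the extension above depends purely on
   the precision of X's type, so for an 8-bit constant with value 0xff
   int_cst_value returns -1, while for 0x7f it returns 127.  */
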
11359 /* If TYPE is an integral or pointer type, return an integer type with
11360 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11361 if TYPE is already an integer type of signedness UNSIGNEDP. */
11362
11363 tree
11364 signed_or_unsigned_type_for (int unsignedp, tree type)
11365 {
11366 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11367 return type;
11368
11369 if (TREE_CODE (type) == VECTOR_TYPE)
11370 {
11371 tree inner = TREE_TYPE (type);
11372 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11373 if (!inner2)
11374 return NULL_TREE;
11375 if (inner == inner2)
11376 return type;
11377 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11378 }
11379
11380 if (!INTEGRAL_TYPE_P (type)
11381 && !POINTER_TYPE_P (type)
11382 && TREE_CODE (type) != OFFSET_TYPE)
11383 return NULL_TREE;
11384
11385 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11386 }
11387
11388 /* If TYPE is an integral or pointer type, return an integer type with
11389 the same precision which is unsigned, or itself if TYPE is already an
11390 unsigned integer type. */
11391
11392 tree
11393 unsigned_type_for (tree type)
11394 {
11395 return signed_or_unsigned_type_for (1, type);
11396 }
11397
11398 /* If TYPE is an integral or pointer type, return an integer type with
11399 the same precision which is signed, or itself if TYPE is already a
11400 signed integer type. */
11401
11402 tree
11403 signed_type_for (tree type)
11404 {
11405 return signed_or_unsigned_type_for (0, type);
11406 }
11407
11408 /* If TYPE is a vector type, return a boolean vector type with the same
11409 number of subparts. Otherwise return boolean_type_node. */
11410
11411 tree
11412 truth_type_for (tree type)
11413 {
11414 if (TREE_CODE (type) == VECTOR_TYPE)
11415 {
11416 if (VECTOR_BOOLEAN_TYPE_P (type))
11417 return type;
11418 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11419 GET_MODE_SIZE (TYPE_MODE (type)));
11420 }
11421 else
11422 return boolean_type_node;
11423 }
11424
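/* Illustrative examples: for a 4-element vector of 32-bit integers this
   returns a 4-element boolean vector type suitable for holding comparison
   results; for a scalar type such as int it returns boolean_type_node.  */
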
11425 /* Returns the largest value obtainable by casting something in INNER type to
11426 OUTER type. */
11427
11428 tree
11429 upper_bound_in_type (tree outer, tree inner)
11430 {
11431 unsigned int det = 0;
11432 unsigned oprec = TYPE_PRECISION (outer);
11433 unsigned iprec = TYPE_PRECISION (inner);
11434 unsigned prec;
11435
11436 /* Compute a unique number for every combination. */
11437 det |= (oprec > iprec) ? 4 : 0;
11438 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11439 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11440
11441 /* Determine the exponent to use. */
11442 switch (det)
11443 {
11444 case 0:
11445 case 1:
11446 /* oprec <= iprec, outer: signed, inner: don't care. */
11447 prec = oprec - 1;
11448 break;
11449 case 2:
11450 case 3:
11451 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11452 prec = oprec;
11453 break;
11454 case 4:
11455 /* oprec > iprec, outer: signed, inner: signed. */
11456 prec = iprec - 1;
11457 break;
11458 case 5:
11459 /* oprec > iprec, outer: signed, inner: unsigned. */
11460 prec = iprec;
11461 break;
11462 case 6:
11463 /* oprec > iprec, outer: unsigned, inner: signed. */
11464 prec = oprec;
11465 break;
11466 case 7:
11467 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11468 prec = iprec;
11469 break;
11470 default:
11471 gcc_unreachable ();
11472 }
11473
11474 return wide_int_to_tree (outer,
11475 wi::mask (prec, false, TYPE_PRECISION (outer)));
11476 }
11477
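/* Worked examples (illustrative only): casting a signed 8-bit value to a
   32-bit unsigned type can produce up to 0xffffffff (case 6 above, e.g.
   (unsigned int) (signed char) -1), while casting an unsigned 8-bit value
   to a 32-bit signed type can produce at most 255 (case 5 above).  */
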
11478 /* Returns the smallest value obtainable by casting something in INNER type to
11479 OUTER type. */
11480
11481 tree
11482 lower_bound_in_type (tree outer, tree inner)
11483 {
11484 unsigned oprec = TYPE_PRECISION (outer);
11485 unsigned iprec = TYPE_PRECISION (inner);
11486
11487 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11488 and obtain 0. */
11489 if (TYPE_UNSIGNED (outer)
11490 /* If we are widening something of an unsigned type, OUTER type
11491 contains all values of INNER type. In particular, both INNER
11492 and OUTER types have zero in common. */
11493 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11494 return build_int_cst (outer, 0);
11495 else
11496 {
11497 /* If we are widening a signed type to another signed type, we
11498 want to obtain -2^(iprec-1). If we are keeping the
11499 precision or narrowing to a signed type, we want to obtain
11500 -2^(oprec-1). */
11501 unsigned prec = oprec > iprec ? iprec : oprec;
11502 return wide_int_to_tree (outer,
11503 wi::mask (prec - 1, true,
11504 TYPE_PRECISION (outer)));
11505 }
11506 }
11507
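/* Worked examples (illustrative only): widening a signed 8-bit value to a
   signed 32-bit type gives a lower bound of -128 (prec == iprec == 8),
   while any cast to an unsigned type gives a lower bound of 0.  */
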
11508 /* Return nonzero if two operands that are suitable for PHI nodes are
11509 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11510 SSA_NAME or invariant. Note that this is strictly an optimization.
11511 That is, callers of this function can directly call operand_equal_p
11512 and get the same result, only slower. */
11513
11514 int
11515 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11516 {
11517 if (arg0 == arg1)
11518 return 1;
11519 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11520 return 0;
11521 return operand_equal_p (arg0, arg1, 0);
11522 }
11523
11524 /* Returns the number of zeros at the end of the binary representation of X. */
11525
11526 tree
11527 num_ending_zeros (const_tree x)
11528 {
11529 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11530 }
11531
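/* Worked example (illustrative only): for X equal to 12 (binary 1100) the
   result is 2; the result has the same type as X.  */
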
11532
11533 #define WALK_SUBTREE(NODE) \
11534 do \
11535 { \
11536 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11537 if (result) \
11538 return result; \
11539 } \
11540 while (0)
11541
11542 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11543 to be walked whenever a type is seen in the tree. The rest of the operands
11544 and the return value are as for walk_tree. */
11545
11546 static tree
11547 walk_type_fields (tree type, walk_tree_fn func, void *data,
11548 hash_set<tree> *pset, walk_tree_lh lh)
11549 {
11550 tree result = NULL_TREE;
11551
11552 switch (TREE_CODE (type))
11553 {
11554 case POINTER_TYPE:
11555 case REFERENCE_TYPE:
11556 case VECTOR_TYPE:
11557 /* We have to worry about mutually recursive pointers. These can't
11558 be written in C. They can in Ada. It's pathological, but
11559 there's an ACATS test (c38102a) that checks it. Deal with this
11560 by checking if we're pointing to another pointer, that one
11561 points to another pointer, that one does too, and we have no htab.
11562 If so, get a hash table. We check three levels deep to avoid
11563 the cost of the hash table if we don't need one. */
11564 if (POINTER_TYPE_P (TREE_TYPE (type))
11565 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11566 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11567 && !pset)
11568 {
11569 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11570 func, data);
11571 if (result)
11572 return result;
11573
11574 break;
11575 }
11576
11577 /* ... fall through ... */
11578
11579 case COMPLEX_TYPE:
11580 WALK_SUBTREE (TREE_TYPE (type));
11581 break;
11582
11583 case METHOD_TYPE:
11584 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11585
11586 /* Fall through. */
11587
11588 case FUNCTION_TYPE:
11589 WALK_SUBTREE (TREE_TYPE (type));
11590 {
11591 tree arg;
11592
11593 /* We never want to walk into default arguments. */
11594 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11595 WALK_SUBTREE (TREE_VALUE (arg));
11596 }
11597 break;
11598
11599 case ARRAY_TYPE:
11600 /* Don't follow this node's type if it is a pointer, for fear that
11601 we'll have infinite recursion. If we have a PSET, then we
11602 need not fear. */
11603 if (pset
11604 || (!POINTER_TYPE_P (TREE_TYPE (type))
11605 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11606 WALK_SUBTREE (TREE_TYPE (type));
11607 WALK_SUBTREE (TYPE_DOMAIN (type));
11608 break;
11609
11610 case OFFSET_TYPE:
11611 WALK_SUBTREE (TREE_TYPE (type));
11612 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11613 break;
11614
11615 default:
11616 break;
11617 }
11618
11619 return NULL_TREE;
11620 }
11621
11622 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11623 called with the DATA and the address of each sub-tree. If FUNC returns a
11624 non-NULL value, the traversal is stopped, and the value returned by FUNC
11625 is returned. If PSET is non-NULL it is used to record the nodes visited,
11626 and to avoid visiting a node more than once. */
11627
11628 tree
11629 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11630 hash_set<tree> *pset, walk_tree_lh lh)
11631 {
11632 enum tree_code code;
11633 int walk_subtrees;
11634 tree result;
11635
11636 #define WALK_SUBTREE_TAIL(NODE) \
11637 do \
11638 { \
11639 tp = & (NODE); \
11640 goto tail_recurse; \
11641 } \
11642 while (0)
11643
11644 tail_recurse:
11645 /* Skip empty subtrees. */
11646 if (!*tp)
11647 return NULL_TREE;
11648
11649 /* Don't walk the same tree twice, if the user has requested
11650 that we avoid doing so. */
11651 if (pset && pset->add (*tp))
11652 return NULL_TREE;
11653
11654 /* Call the function. */
11655 walk_subtrees = 1;
11656 result = (*func) (tp, &walk_subtrees, data);
11657
11658 /* If we found something, return it. */
11659 if (result)
11660 return result;
11661
11662 code = TREE_CODE (*tp);
11663
11664 /* Even if we didn't, FUNC may have decided that there was nothing
11665 interesting below this point in the tree. */
11666 if (!walk_subtrees)
11667 {
11668 /* But we still need to check our siblings. */
11669 if (code == TREE_LIST)
11670 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11671 else if (code == OMP_CLAUSE)
11672 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11673 else
11674 return NULL_TREE;
11675 }
11676
11677 if (lh)
11678 {
11679 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11680 if (result || !walk_subtrees)
11681 return result;
11682 }
11683
11684 switch (code)
11685 {
11686 case ERROR_MARK:
11687 case IDENTIFIER_NODE:
11688 case INTEGER_CST:
11689 case REAL_CST:
11690 case FIXED_CST:
11691 case VECTOR_CST:
11692 case STRING_CST:
11693 case BLOCK:
11694 case PLACEHOLDER_EXPR:
11695 case SSA_NAME:
11696 case FIELD_DECL:
11697 case RESULT_DECL:
11698 /* None of these have subtrees other than those already walked
11699 above. */
11700 break;
11701
11702 case TREE_LIST:
11703 WALK_SUBTREE (TREE_VALUE (*tp));
11704 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11705 break;
11706
11707 case TREE_VEC:
11708 {
11709 int len = TREE_VEC_LENGTH (*tp);
11710
11711 if (len == 0)
11712 break;
11713
11714 /* Walk all elements but the first. */
11715 while (--len)
11716 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11717
11718 /* Now walk the first one as a tail call. */
11719 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11720 }
11721
11722 case COMPLEX_CST:
11723 WALK_SUBTREE (TREE_REALPART (*tp));
11724 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11725
11726 case CONSTRUCTOR:
11727 {
11728 unsigned HOST_WIDE_INT idx;
11729 constructor_elt *ce;
11730
11731 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11732 idx++)
11733 WALK_SUBTREE (ce->value);
11734 }
11735 break;
11736
11737 case SAVE_EXPR:
11738 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11739
11740 case BIND_EXPR:
11741 {
11742 tree decl;
11743 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11744 {
11745 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11746 into declarations that are just mentioned, rather than
11747 declared; they don't really belong to this part of the tree.
11748 And, we can see cycles: the initializer for a declaration
11749 can refer to the declaration itself. */
11750 WALK_SUBTREE (DECL_INITIAL (decl));
11751 WALK_SUBTREE (DECL_SIZE (decl));
11752 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11753 }
11754 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11755 }
11756
11757 case STATEMENT_LIST:
11758 {
11759 tree_stmt_iterator i;
11760 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11761 WALK_SUBTREE (*tsi_stmt_ptr (i));
11762 }
11763 break;
11764
11765 case OMP_CLAUSE:
11766 switch (OMP_CLAUSE_CODE (*tp))
11767 {
11768 case OMP_CLAUSE_GANG:
11769 case OMP_CLAUSE__GRIDDIM_:
11770 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11771 /* FALLTHRU */
11772
11773 case OMP_CLAUSE_ASYNC:
11774 case OMP_CLAUSE_WAIT:
11775 case OMP_CLAUSE_WORKER:
11776 case OMP_CLAUSE_VECTOR:
11777 case OMP_CLAUSE_NUM_GANGS:
11778 case OMP_CLAUSE_NUM_WORKERS:
11779 case OMP_CLAUSE_VECTOR_LENGTH:
11780 case OMP_CLAUSE_PRIVATE:
11781 case OMP_CLAUSE_SHARED:
11782 case OMP_CLAUSE_FIRSTPRIVATE:
11783 case OMP_CLAUSE_COPYIN:
11784 case OMP_CLAUSE_COPYPRIVATE:
11785 case OMP_CLAUSE_FINAL:
11786 case OMP_CLAUSE_IF:
11787 case OMP_CLAUSE_NUM_THREADS:
11788 case OMP_CLAUSE_SCHEDULE:
11789 case OMP_CLAUSE_UNIFORM:
11790 case OMP_CLAUSE_DEPEND:
11791 case OMP_CLAUSE_NUM_TEAMS:
11792 case OMP_CLAUSE_THREAD_LIMIT:
11793 case OMP_CLAUSE_DEVICE:
11794 case OMP_CLAUSE_DIST_SCHEDULE:
11795 case OMP_CLAUSE_SAFELEN:
11796 case OMP_CLAUSE_SIMDLEN:
11797 case OMP_CLAUSE_ORDERED:
11798 case OMP_CLAUSE_PRIORITY:
11799 case OMP_CLAUSE_GRAINSIZE:
11800 case OMP_CLAUSE_NUM_TASKS:
11801 case OMP_CLAUSE_HINT:
11802 case OMP_CLAUSE_TO_DECLARE:
11803 case OMP_CLAUSE_LINK:
11804 case OMP_CLAUSE_USE_DEVICE_PTR:
11805 case OMP_CLAUSE_IS_DEVICE_PTR:
11806 case OMP_CLAUSE__LOOPTEMP_:
11807 case OMP_CLAUSE__SIMDUID_:
11808 case OMP_CLAUSE__CILK_FOR_COUNT_:
11809 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11810 /* FALLTHRU */
11811
11812 case OMP_CLAUSE_INDEPENDENT:
11813 case OMP_CLAUSE_NOWAIT:
11814 case OMP_CLAUSE_DEFAULT:
11815 case OMP_CLAUSE_UNTIED:
11816 case OMP_CLAUSE_MERGEABLE:
11817 case OMP_CLAUSE_PROC_BIND:
11818 case OMP_CLAUSE_INBRANCH:
11819 case OMP_CLAUSE_NOTINBRANCH:
11820 case OMP_CLAUSE_FOR:
11821 case OMP_CLAUSE_PARALLEL:
11822 case OMP_CLAUSE_SECTIONS:
11823 case OMP_CLAUSE_TASKGROUP:
11824 case OMP_CLAUSE_NOGROUP:
11825 case OMP_CLAUSE_THREADS:
11826 case OMP_CLAUSE_SIMD:
11827 case OMP_CLAUSE_DEFAULTMAP:
11828 case OMP_CLAUSE_AUTO:
11829 case OMP_CLAUSE_SEQ:
11830 case OMP_CLAUSE_TILE:
11831 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11832
11833 case OMP_CLAUSE_LASTPRIVATE:
11834 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11835 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11836 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11837
11838 case OMP_CLAUSE_COLLAPSE:
11839 {
11840 int i;
11841 for (i = 0; i < 3; i++)
11842 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11843 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11844 }
11845
11846 case OMP_CLAUSE_LINEAR:
11847 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11848 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11849 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11850 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11851
11852 case OMP_CLAUSE_ALIGNED:
11853 case OMP_CLAUSE_FROM:
11854 case OMP_CLAUSE_TO:
11855 case OMP_CLAUSE_MAP:
11856 case OMP_CLAUSE__CACHE_:
11857 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11858 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11859 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11860
11861 case OMP_CLAUSE_REDUCTION:
11862 {
11863 int i;
11864 for (i = 0; i < 5; i++)
11865 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11866 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11867 }
11868
11869 default:
11870 gcc_unreachable ();
11871 }
11872 break;
11873
11874 case TARGET_EXPR:
11875 {
11876 int i, len;
11877
11878 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11879 But, we only want to walk once. */
11880 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11881 for (i = 0; i < len; ++i)
11882 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11883 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11884 }
11885
11886 case DECL_EXPR:
11887 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11888 defining. We only want to walk into these fields of a type in this
11889 case and not in the general case of a mere reference to the type.
11890
11891 The criterion is as follows: if the field can be an expression, it
11892 must be walked only here. This should be in keeping with the fields
11893 that are directly gimplified in gimplify_type_sizes in order for the
11894 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11895 variable-sized types.
11896
11897 Note that DECLs get walked as part of processing the BIND_EXPR. */
11898 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11899 {
11900 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11901 if (TREE_CODE (*type_p) == ERROR_MARK)
11902 return NULL_TREE;
11903
11904 /* Call the function for the type. See if it returns anything or
11905 doesn't want us to continue. If we are to continue, walk both
11906 the normal fields and those for the declaration case. */
11907 result = (*func) (type_p, &walk_subtrees, data);
11908 if (result || !walk_subtrees)
11909 return result;
11910
11911 /* But do not walk a pointed-to type since it may itself need to
11912 be walked in the declaration case if it isn't anonymous. */
11913 if (!POINTER_TYPE_P (*type_p))
11914 {
11915 result = walk_type_fields (*type_p, func, data, pset, lh);
11916 if (result)
11917 return result;
11918 }
11919
11920 /* If this is a record type, also walk the fields. */
11921 if (RECORD_OR_UNION_TYPE_P (*type_p))
11922 {
11923 tree field;
11924
11925 for (field = TYPE_FIELDS (*type_p); field;
11926 field = DECL_CHAIN (field))
11927 {
11928 /* We'd like to look at the type of the field, but we can
11929 easily get infinite recursion. So assume it's pointed
11930 to elsewhere in the tree. Also, ignore things that
11931 aren't fields. */
11932 if (TREE_CODE (field) != FIELD_DECL)
11933 continue;
11934
11935 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11936 WALK_SUBTREE (DECL_SIZE (field));
11937 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11938 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11939 WALK_SUBTREE (DECL_QUALIFIER (field));
11940 }
11941 }
11942
11943 /* Same for scalar types. */
11944 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11945 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11946 || TREE_CODE (*type_p) == INTEGER_TYPE
11947 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11948 || TREE_CODE (*type_p) == REAL_TYPE)
11949 {
11950 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11951 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11952 }
11953
11954 WALK_SUBTREE (TYPE_SIZE (*type_p));
11955 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11956 }
11957 /* FALLTHRU */
11958
11959 default:
11960 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11961 {
11962 int i, len;
11963
11964 /* Walk over all the sub-trees of this operand. */
11965 len = TREE_OPERAND_LENGTH (*tp);
11966
11967 /* Go through the subtrees. We need to do this in forward order so
11968 that the scope of a FOR_EXPR is handled properly. */
11969 if (len)
11970 {
11971 for (i = 0; i < len - 1; ++i)
11972 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11973 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11974 }
11975 }
11976 /* If this is a type, walk the needed fields in the type. */
11977 else if (TYPE_P (*tp))
11978 return walk_type_fields (*tp, func, data, pset, lh);
11979 break;
11980 }
11981
11982 /* We didn't find what we were looking for. */
11983 return NULL_TREE;
11984
11985 #undef WALK_SUBTREE_TAIL
11986 }
11987 #undef WALK_SUBTREE
11988
11989 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11990
11991 tree
11992 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11993 walk_tree_lh lh)
11994 {
11995 tree result;
11996
11997 hash_set<tree> pset;
11998 result = walk_tree_1 (tp, func, data, &pset, lh);
11999 return result;
12000 }
12001
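/* Callback sketch (illustrative only; COUNT_CALLS_R, EXPR and COUNT are
   hypothetical): a walk_tree_fn returns NULL_TREE to keep walking, clears
   *WALK_SUBTREES to skip the current node's children, or returns a non-NULL
   tree to stop the walk.  A minimal callback counting CALL_EXPR nodes could
   look like

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

   and would be invoked as walk_tree (&expr, count_calls_r, &count, NULL).  */
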
12002
12003 tree
12004 tree_block (tree t)
12005 {
12006 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12007
12008 if (IS_EXPR_CODE_CLASS (c))
12009 return LOCATION_BLOCK (t->exp.locus);
12010 gcc_unreachable ();
12011 return NULL;
12012 }
12013
12014 void
12015 tree_set_block (tree t, tree b)
12016 {
12017 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12018
12019 if (IS_EXPR_CODE_CLASS (c))
12020 {
12021 t->exp.locus = set_block (t->exp.locus, b);
12022 }
12023 else
12024 gcc_unreachable ();
12025 }
12026
12027 /* Create a nameless artificial label and put it in the current
12028 function context. The label has a location of LOC. Returns the
12029 newly created label. */
12030
12031 tree
12032 create_artificial_label (location_t loc)
12033 {
12034 tree lab = build_decl (loc,
12035 LABEL_DECL, NULL_TREE, void_type_node);
12036
12037 DECL_ARTIFICIAL (lab) = 1;
12038 DECL_IGNORED_P (lab) = 1;
12039 DECL_CONTEXT (lab) = current_function_decl;
12040 return lab;
12041 }
12042
12043 /* Given a tree, try to return a useful variable name that we can use
12044 to prefix a temporary that is being assigned the value of the tree.
12045 I.e., given <temp> = &A, return A. */
12046
12047 const char *
12048 get_name (tree t)
12049 {
12050 tree stripped_decl;
12051
12052 stripped_decl = t;
12053 STRIP_NOPS (stripped_decl);
12054 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12055 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12056 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12057 {
12058 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12059 if (!name)
12060 return NULL;
12061 return IDENTIFIER_POINTER (name);
12062 }
12063 else
12064 {
12065 switch (TREE_CODE (stripped_decl))
12066 {
12067 case ADDR_EXPR:
12068 return get_name (TREE_OPERAND (stripped_decl, 0));
12069 default:
12070 return NULL;
12071 }
12072 }
12073 }
12074
12075 /* Return true if FNTYPE has a variable argument list. */
12076
12077 bool
12078 stdarg_p (const_tree fntype)
12079 {
12080 function_args_iterator args_iter;
12081 tree n = NULL_TREE, t;
12082
12083 if (!fntype)
12084 return false;
12085
12086 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12087 {
12088 n = t;
12089 }
12090
12091 return n != NULL_TREE && n != void_type_node;
12092 }
12093
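/* Illustrative examples: for the type of "int f (int, ...)" the argument
   list does not end in void_type_node, so this returns true; for
   "int f (int)" the list ends in void_type_node and for an unprototyped
   "int f ()" the list is empty, so both return false.  */
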
12094 /* Return true if FNTYPE has a prototype. */
12095
12096 bool
12097 prototype_p (const_tree fntype)
12098 {
12099 tree t;
12100
12101 gcc_assert (fntype != NULL_TREE);
12102
12103 t = TYPE_ARG_TYPES (fntype);
12104 return (t != NULL_TREE);
12105 }
12106
12107 /* If BLOCK is inlined from an __attribute__((__artificial__))
12108 routine, return a pointer to the location from which it has
12109 been called. */
12110 location_t *
12111 block_nonartificial_location (tree block)
12112 {
12113 location_t *ret = NULL;
12114
12115 while (block && TREE_CODE (block) == BLOCK
12116 && BLOCK_ABSTRACT_ORIGIN (block))
12117 {
12118 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12119
12120 while (TREE_CODE (ao) == BLOCK
12121 && BLOCK_ABSTRACT_ORIGIN (ao)
12122 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12123 ao = BLOCK_ABSTRACT_ORIGIN (ao);
12124
12125 if (TREE_CODE (ao) == FUNCTION_DECL)
12126 {
12127 /* If AO is an artificial inline, point RET to the
12128 call site locus at which it has been inlined and continue
12129 the loop, in case AO's caller is also an artificial
12130 inline. */
12131 if (DECL_DECLARED_INLINE_P (ao)
12132 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12133 ret = &BLOCK_SOURCE_LOCATION (block);
12134 else
12135 break;
12136 }
12137 else if (TREE_CODE (ao) != BLOCK)
12138 break;
12139
12140 block = BLOCK_SUPERCONTEXT (block);
12141 }
12142 return ret;
12143 }
12144
12145
12146 /* If EXP is inlined from an __attribute__((__artificial__))
12147 function, return the location of the original call expression. */
12148
12149 location_t
12150 tree_nonartificial_location (tree exp)
12151 {
12152 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12153
12154 if (loc)
12155 return *loc;
12156 else
12157 return EXPR_LOCATION (exp);
12158 }
12159
12160
12161 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12162 and TARGET_OPTION_NODE nodes. */
12163
12164 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12165
12166 hashval_t
12167 cl_option_hasher::hash (tree x)
12168 {
12169 const_tree const t = x;
12170 const char *p;
12171 size_t i;
12172 size_t len = 0;
12173 hashval_t hash = 0;
12174
12175 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12176 {
12177 p = (const char *)TREE_OPTIMIZATION (t);
12178 len = sizeof (struct cl_optimization);
12179 }
12180
12181 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12182 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12183
12184 else
12185 gcc_unreachable ();
12186
12187 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12188 something else. */
12189 for (i = 0; i < len; i++)
12190 if (p[i])
12191 hash = (hash << 4) ^ ((i << 2) | p[i]);
12192
12193 return hash;
12194 }
12195
12196 /* Return true if the value represented by X (an OPTIMIZATION_NODE or
12197 TARGET_OPTION_NODE) is the same as that given by Y, a node of the
12198 same kind. */
12199
12200 bool
12201 cl_option_hasher::equal (tree x, tree y)
12202 {
12203 const_tree const xt = x;
12204 const_tree const yt = y;
12205 const char *xp;
12206 const char *yp;
12207 size_t len;
12208
12209 if (TREE_CODE (xt) != TREE_CODE (yt))
12210 return 0;
12211
12212 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12213 {
12214 xp = (const char *)TREE_OPTIMIZATION (xt);
12215 yp = (const char *)TREE_OPTIMIZATION (yt);
12216 len = sizeof (struct cl_optimization);
12217 }
12218
12219 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12220 {
12221 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12222 TREE_TARGET_OPTION (yt));
12223 }
12224
12225 else
12226 gcc_unreachable ();
12227
12228 return (memcmp (xp, yp, len) == 0);
12229 }
12230
12231 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12232
12233 tree
12234 build_optimization_node (struct gcc_options *opts)
12235 {
12236 tree t;
12237
12238 /* Use the cache of optimization nodes. */
12239
12240 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12241 opts);
12242
12243 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12244 t = *slot;
12245 if (!t)
12246 {
12247 /* Insert this one into the hash table. */
12248 t = cl_optimization_node;
12249 *slot = t;
12250
12251 /* Make a new node for next time round. */
12252 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12253 }
12254
12255 return t;
12256 }
12257
12258 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12259
12260 tree
12261 build_target_option_node (struct gcc_options *opts)
12262 {
12263 tree t;
12264
12265 /* Use the cache of optimization nodes. */
12266
12267 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12268 opts);
12269
12270 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12271 t = *slot;
12272 if (!t)
12273 {
12274 /* Insert this one into the hash table. */
12275 t = cl_target_option_node;
12276 *slot = t;
12277
12278 /* Make a new node for next time round. */
12279 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12280 }
12281
12282 return t;
12283 }
12284
12285 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12286 so that they aren't saved during PCH writing. */
12287
12288 void
12289 prepare_target_option_nodes_for_pch (void)
12290 {
12291 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12292 for (; iter != cl_option_hash_table->end (); ++iter)
12293 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12294 TREE_TARGET_GLOBALS (*iter) = NULL;
12295 }
12296
12297 /* Determine the "ultimate origin" of a block. The block may be an inlined
12298 instance of an inlined instance of a block which is local to an inline
12299 function, so we have to trace all of the way back through the origin chain
12300 to find out what sort of node actually served as the original seed for the
12301 given block. */
12302
12303 tree
12304 block_ultimate_origin (const_tree block)
12305 {
12306 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12307
12308 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12309 we're trying to output the abstract instance of this function. */
12310 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12311 return NULL_TREE;
12312
12313 if (immediate_origin == NULL_TREE)
12314 return NULL_TREE;
12315 else
12316 {
12317 tree ret_val;
12318 tree lookahead = immediate_origin;
12319
12320 do
12321 {
12322 ret_val = lookahead;
12323 lookahead = (TREE_CODE (ret_val) == BLOCK
12324 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12325 }
12326 while (lookahead != NULL && lookahead != ret_val);
12327
12328 /* The block's abstract origin chain may not be the *ultimate* origin of
12329 the block. It could lead to a DECL that has an abstract origin set.
12330 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12331 will give us if it has one). Note that DECL's abstract origins are
12332 supposed to be the most distant ancestor (or so decl_ultimate_origin
12333 claims), so we don't need to loop following the DECL origins. */
12334 if (DECL_P (ret_val))
12335 return DECL_ORIGIN (ret_val);
12336
12337 return ret_val;
12338 }
12339 }
12340
12341 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12342 no instruction. */
12343
12344 bool
12345 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12346 {
12347 /* Do not strip casts into or out of differing address spaces. */
12348 if (POINTER_TYPE_P (outer_type)
12349 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12350 {
12351 if (!POINTER_TYPE_P (inner_type)
12352 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12353 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12354 return false;
12355 }
12356 else if (POINTER_TYPE_P (inner_type)
12357 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12358 {
12359 /* We already know that outer_type is not a pointer with
12360 a non-generic address space. */
12361 return false;
12362 }
12363
12364 /* Use precision rather than machine mode when we can, which gives
12365 the correct answer even for submode (bit-field) types. */
12366 if ((INTEGRAL_TYPE_P (outer_type)
12367 || POINTER_TYPE_P (outer_type)
12368 || TREE_CODE (outer_type) == OFFSET_TYPE)
12369 && (INTEGRAL_TYPE_P (inner_type)
12370 || POINTER_TYPE_P (inner_type)
12371 || TREE_CODE (inner_type) == OFFSET_TYPE))
12372 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12373
12374 /* Otherwise fall back on comparing machine modes (e.g. for
12375 aggregate types, floats). */
12376 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12377 }
12378
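/* Illustrative examples: a conversion between int and unsigned int is a
   no-op here because the precisions match, whereas a conversion from short
   to int is not, since it changes the precision and therefore requires an
   actual extension.  */
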
12379 /* Return true iff conversion in EXP generates no instruction. Mark
12380 it inline so that we fully inline into the stripping functions even
12381 though we have two uses of this function. */
12382
12383 static inline bool
12384 tree_nop_conversion (const_tree exp)
12385 {
12386 tree outer_type, inner_type;
12387
12388 if (!CONVERT_EXPR_P (exp)
12389 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12390 return false;
12391 if (TREE_OPERAND (exp, 0) == error_mark_node)
12392 return false;
12393
12394 outer_type = TREE_TYPE (exp);
12395 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12396
12397 if (!inner_type)
12398 return false;
12399
12400 return tree_nop_conversion_p (outer_type, inner_type);
12401 }
12402
12403 /* Return true iff conversion in EXP generates no instruction. Don't
12404 consider conversions changing the signedness. */
12405
12406 static bool
12407 tree_sign_nop_conversion (const_tree exp)
12408 {
12409 tree outer_type, inner_type;
12410
12411 if (!tree_nop_conversion (exp))
12412 return false;
12413
12414 outer_type = TREE_TYPE (exp);
12415 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12416
12417 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12418 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12419 }
12420
12421 /* Strip conversions from EXP according to tree_nop_conversion and
12422 return the resulting expression. */
12423
12424 tree
12425 tree_strip_nop_conversions (tree exp)
12426 {
12427 while (tree_nop_conversion (exp))
12428 exp = TREE_OPERAND (exp, 0);
12429 return exp;
12430 }
12431
12432 /* Strip conversions from EXP according to tree_sign_nop_conversion
12433 and return the resulting expression. */
12434
12435 tree
12436 tree_strip_sign_nop_conversions (tree exp)
12437 {
12438 while (tree_sign_nop_conversion (exp))
12439 exp = TREE_OPERAND (exp, 0);
12440 return exp;
12441 }
12442
12443 /* Strip any useless floating point extensions from EXP and return the result. */
12444 tree
12445 strip_float_extensions (tree exp)
12446 {
12447 tree sub, expt, subt;
12448
12449 /* For a floating point constant, look up the narrowest type that can hold
12450 it properly and handle it like (type)(narrowest_type)constant.
12451 This way we can optimize, for instance, a=a*2.0 where "a" is float
12452 but 2.0 is a double constant. */
12453 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12454 {
12455 REAL_VALUE_TYPE orig;
12456 tree type = NULL;
12457
12458 orig = TREE_REAL_CST (exp);
12459 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12460 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12461 type = float_type_node;
12462 else if (TYPE_PRECISION (TREE_TYPE (exp))
12463 > TYPE_PRECISION (double_type_node)
12464 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12465 type = double_type_node;
12466 if (type)
12467 return build_real_truncate (type, orig);
12468 }
12469
12470 if (!CONVERT_EXPR_P (exp))
12471 return exp;
12472
12473 sub = TREE_OPERAND (exp, 0);
12474 subt = TREE_TYPE (sub);
12475 expt = TREE_TYPE (exp);
12476
12477 if (!FLOAT_TYPE_P (subt))
12478 return exp;
12479
12480 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12481 return exp;
12482
12483 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12484 return exp;
12485
12486 return strip_float_extensions (sub);
12487 }
12488
12489 /* Strip out all handled components that produce invariant
12490 offsets. */
12491
12492 const_tree
12493 strip_invariant_refs (const_tree op)
12494 {
12495 while (handled_component_p (op))
12496 {
12497 switch (TREE_CODE (op))
12498 {
12499 case ARRAY_REF:
12500 case ARRAY_RANGE_REF:
12501 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12502 || TREE_OPERAND (op, 2) != NULL_TREE
12503 || TREE_OPERAND (op, 3) != NULL_TREE)
12504 return NULL;
12505 break;
12506
12507 case COMPONENT_REF:
12508 if (TREE_OPERAND (op, 2) != NULL_TREE)
12509 return NULL;
12510 break;
12511
12512 default:;
12513 }
12514 op = TREE_OPERAND (op, 0);
12515 }
12516
12517 return op;
12518 }
12519
12520 static GTY(()) tree gcc_eh_personality_decl;
12521
12522 /* Return the GCC personality function decl. */
12523
12524 tree
12525 lhd_gcc_personality (void)
12526 {
12527 if (!gcc_eh_personality_decl)
12528 gcc_eh_personality_decl = build_personality_function ("gcc");
12529 return gcc_eh_personality_decl;
12530 }
12531
12532 /* TARGET is the call target of a GIMPLE call statement
12533 (obtained by gimple_call_fn). Return true if it is an
12534 OBJ_TYPE_REF representing a virtual call of a C++ method.
12535 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12536 through a cast, where the middle-end devirtualization
12537 machinery can't apply.) */
12538
12539 bool
12540 virtual_method_call_p (const_tree target)
12541 {
12542 if (TREE_CODE (target) != OBJ_TYPE_REF)
12543 return false;
12544 tree t = TREE_TYPE (target);
12545 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12546 t = TREE_TYPE (t);
12547 if (TREE_CODE (t) == FUNCTION_TYPE)
12548 return false;
12549 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12550 /* If there is no BINFO associated, it means that the type was built
12551 without devirtualization enabled. Do not consider this a virtual
12552 call. */
12553 if (!TYPE_BINFO (obj_type_ref_class (target)))
12554 return false;
12555 return true;
12556 }
12557
12558 /* REF is an OBJ_TYPE_REF; return the class the reference corresponds to. */
12559
12560 tree
12561 obj_type_ref_class (const_tree ref)
12562 {
12563 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12564 ref = TREE_TYPE (ref);
12565 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12566 ref = TREE_TYPE (ref);
12567 /* We look for the type that THIS points to. ObjC also builds
12568 OBJ_TYPE_REF with non-method calls; their first parameter
12569 ID, however, also corresponds to the class type. */
12570 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12571 || TREE_CODE (ref) == FUNCTION_TYPE);
12572 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12573 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12574 return TREE_TYPE (ref);
12575 }
12576
12577 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12578
12579 static tree
12580 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12581 {
12582 unsigned int i;
12583 tree base_binfo, b;
12584
12585 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12586 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12587 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12588 return base_binfo;
12589 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12590 return b;
12591 return NULL;
12592 }
12593
12594 /* Try to find a base info of BINFO that would have its field decl at offset
12595 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12596 found, return it; otherwise return NULL_TREE. */
12597
12598 tree
12599 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12600 {
12601 tree type = BINFO_TYPE (binfo);
12602
12603 while (true)
12604 {
12605 HOST_WIDE_INT pos, size;
12606 tree fld;
12607 int i;
12608
12609 if (types_same_for_odr (type, expected_type))
12610 return binfo;
12611 if (offset < 0)
12612 return NULL_TREE;
12613
12614 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12615 {
12616 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12617 continue;
12618
12619 pos = int_bit_position (fld);
12620 size = tree_to_uhwi (DECL_SIZE (fld));
12621 if (pos <= offset && (pos + size) > offset)
12622 break;
12623 }
12624 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12625 return NULL_TREE;
12626
12627 /* Offset 0 indicates the primary base, whose vtable contents are
12628 represented in the binfo for the derived class. */
12629 else if (offset != 0)
12630 {
12631 tree found_binfo = NULL, base_binfo;
12632 /* Offsets in BINFO are in bytes relative to the whole structure
12633 while POS is in bits relative to the containing field. */
12634 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12635 / BITS_PER_UNIT);
12636
12637 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12638 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12639 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12640 {
12641 found_binfo = base_binfo;
12642 break;
12643 }
12644 if (found_binfo)
12645 binfo = found_binfo;
12646 else
12647 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12648 binfo_offset);
12649 }
12650
12651 type = TREE_TYPE (fld);
12652 offset -= pos;
12653 }
12654 }
12655
12656 /* Returns true if X is a typedef decl. */
12657
12658 bool
12659 is_typedef_decl (const_tree x)
12660 {
12661 return (x && TREE_CODE (x) == TYPE_DECL
12662 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12663 }
12664
12665 /* Returns true iff TYPE is a type variant created for a typedef. */
12666
12667 bool
12668 typedef_variant_p (const_tree type)
12669 {
12670 return is_typedef_decl (TYPE_NAME (type));
12671 }
12672
12673 /* Warn about a use of an identifier which was marked deprecated. */
12674 void
12675 warn_deprecated_use (tree node, tree attr)
12676 {
12677 const char *msg;
12678
12679 if (node == 0 || !warn_deprecated_decl)
12680 return;
12681
12682 if (!attr)
12683 {
12684 if (DECL_P (node))
12685 attr = DECL_ATTRIBUTES (node);
12686 else if (TYPE_P (node))
12687 {
12688 tree decl = TYPE_STUB_DECL (node);
12689 if (decl)
12690 attr = lookup_attribute ("deprecated",
12691 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12692 }
12693 }
12694
12695 if (attr)
12696 attr = lookup_attribute ("deprecated", attr);
12697
12698 if (attr)
12699 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12700 else
12701 msg = NULL;
12702
12703 bool w;
12704 if (DECL_P (node))
12705 {
12706 if (msg)
12707 w = warning (OPT_Wdeprecated_declarations,
12708 "%qD is deprecated: %s", node, msg);
12709 else
12710 w = warning (OPT_Wdeprecated_declarations,
12711 "%qD is deprecated", node);
12712 if (w)
12713 inform (DECL_SOURCE_LOCATION (node), "declared here");
12714 }
12715 else if (TYPE_P (node))
12716 {
12717 tree what = NULL_TREE;
12718 tree decl = TYPE_STUB_DECL (node);
12719
12720 if (TYPE_NAME (node))
12721 {
12722 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12723 what = TYPE_NAME (node);
12724 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12725 && DECL_NAME (TYPE_NAME (node)))
12726 what = DECL_NAME (TYPE_NAME (node));
12727 }
12728
12729 if (decl)
12730 {
12731 if (what)
12732 {
12733 if (msg)
12734 w = warning (OPT_Wdeprecated_declarations,
12735 "%qE is deprecated: %s", what, msg);
12736 else
12737 w = warning (OPT_Wdeprecated_declarations,
12738 "%qE is deprecated", what);
12739 }
12740 else
12741 {
12742 if (msg)
12743 w = warning (OPT_Wdeprecated_declarations,
12744 "type is deprecated: %s", msg);
12745 else
12746 w = warning (OPT_Wdeprecated_declarations,
12747 "type is deprecated");
12748 }
12749 if (w)
12750 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12751 }
12752 else
12753 {
12754 if (what)
12755 {
12756 if (msg)
12757 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12758 what, msg);
12759 else
12760 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12761 }
12762 else
12763 {
12764 if (msg)
12765 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12766 msg);
12767 else
12768 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12769 }
12770 }
12771 }
12772 }
12773
12774 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12775 somewhere in it. */
12776
12777 bool
12778 contains_bitfld_component_ref_p (const_tree ref)
12779 {
12780 while (handled_component_p (ref))
12781 {
12782 if (TREE_CODE (ref) == COMPONENT_REF
12783 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12784 return true;
12785 ref = TREE_OPERAND (ref, 0);
12786 }
12787
12788 return false;
12789 }
12790
12791 /* Try to determine whether a TRY_CATCH expression can fall through.
12792 This is a subroutine of block_may_fallthru. */
12793
12794 static bool
12795 try_catch_may_fallthru (const_tree stmt)
12796 {
12797 tree_stmt_iterator i;
12798
12799 /* If the TRY block can fall through, the whole TRY_CATCH can
12800 fall through. */
12801 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12802 return true;
12803
12804 i = tsi_start (TREE_OPERAND (stmt, 1));
12805 switch (TREE_CODE (tsi_stmt (i)))
12806 {
12807 case CATCH_EXPR:
12808 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12809 catch expression and a body. The whole TRY_CATCH may fall
12810 through iff any of the catch bodies falls through. */
12811 for (; !tsi_end_p (i); tsi_next (&i))
12812 {
12813 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12814 return true;
12815 }
12816 return false;
12817
12818 case EH_FILTER_EXPR:
12819 /* The exception filter expression only matters if there is an
12820 exception. If the exception does not match EH_FILTER_TYPES,
12821 we will execute EH_FILTER_FAILURE, and we will fall through
12822 if that falls through. If the exception does match
12823 EH_FILTER_TYPES, the stack unwinder will continue up the
12824 stack, so we will not fall through. We don't know whether we
12825 will throw an exception which matches EH_FILTER_TYPES or not,
12826 so we just ignore EH_FILTER_TYPES and assume that we might
12827 throw an exception which doesn't match. */
12828 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12829
12830 default:
12831 /* This case represents statements to be executed when an
12832 exception occurs. Those statements are implicitly followed
12833 by a RESX statement to resume execution after the exception.
12834 So in this case the TRY_CATCH never falls through. */
12835 return false;
12836 }
12837 }
12838
12839 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12840 need not be 100% accurate; simply be conservative and return true if we
12841 don't know. This is used only to avoid stupidly generating extra code.
12842 If we're wrong, we'll just delete the extra code later. */
12843
12844 bool
12845 block_may_fallthru (const_tree block)
12846 {
12847 /* This CONST_CAST is okay because expr_last returns its argument
12848 unmodified and we assign it to a const_tree. */
12849 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12850
12851 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12852 {
12853 case GOTO_EXPR:
12854 case RETURN_EXPR:
12855 /* Easy cases. If the last statement of the block implies
12856 control transfer, then we can't fall through. */
12857 return false;
12858
12859 case SWITCH_EXPR:
12860 /* If SWITCH_LABELS is set, this is lowered, and represents a
12861 branch to a selected label and hence cannot fall through.
12862 Otherwise SWITCH_BODY is set, and the switch can fall
12863 through. */
12864 return SWITCH_LABELS (stmt) == NULL_TREE;
12865
12866 case COND_EXPR:
12867 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12868 return true;
12869 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12870
12871 case BIND_EXPR:
12872 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12873
12874 case TRY_CATCH_EXPR:
12875 return try_catch_may_fallthru (stmt);
12876
12877 case TRY_FINALLY_EXPR:
12878 /* The finally clause is always executed after the try clause,
12879 so if it does not fall through, then the try-finally will not
12880 fall through. Otherwise, if the try clause does not fall
12881 through, then when the finally clause falls through it will
12882 resume execution wherever the try clause was going. So the
12883 whole try-finally will only fall through if both the try
12884 clause and the finally clause fall through. */
12885 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12886 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12887
12888 case MODIFY_EXPR:
12889 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12890 stmt = TREE_OPERAND (stmt, 1);
12891 else
12892 return true;
12893 /* FALLTHRU */
12894
12895 case CALL_EXPR:
12896 /* Functions that do not return do not fall through. */
12897 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12898
12899 case CLEANUP_POINT_EXPR:
12900 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12901
12902 case TARGET_EXPR:
12903 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12904
12905 case ERROR_MARK:
12906 return true;
12907
12908 default:
12909 return lang_hooks.block_may_fallthru (stmt);
12910 }
12911 }
12912
12913 /* True if we are using EH to handle cleanups. */
12914 static bool using_eh_for_cleanups_flag = false;
12915
12916 /* This routine is called from front ends to indicate that EH should be used
12917 for cleanups. */
12918 void
12919 using_eh_for_cleanups (void)
12920 {
12921 using_eh_for_cleanups_flag = true;
12922 }
12923
12924 /* Query whether EH is used for cleanups. */
12925 bool
12926 using_eh_for_cleanups_p (void)
12927 {
12928 return using_eh_for_cleanups_flag;
12929 }
12930
12931 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12932 const char *
12933 get_tree_code_name (enum tree_code code)
12934 {
12935 const char *invalid = "<invalid tree code>";
12936
12937 if (code >= MAX_TREE_CODES)
12938 return invalid;
12939
12940 return tree_code_name[code];
12941 }
12942
12943 /* Drops the TREE_OVERFLOW flag from T. */
12944
12945 tree
12946 drop_tree_overflow (tree t)
12947 {
12948 gcc_checking_assert (TREE_OVERFLOW (t));
12949
12950 /* For tree codes with a sharing machinery re-build the result. */
12951 if (TREE_CODE (t) == INTEGER_CST)
12952 return wide_int_to_tree (TREE_TYPE (t), t);
12953
12954 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12955 and drop the flag. */
12956 t = copy_node (t);
12957 TREE_OVERFLOW (t) = 0;
12958 return t;
12959 }
12960
12961 /* Given a memory reference expression T, return its base address.
12962 The base address of a memory reference expression is the main
12963 object being referenced. For instance, the base address for
12964 'array[i].fld[j]' is 'array'. You can think of this as stripping
12965 away the offset part from a memory address.
12966
12967 This function calls handled_component_p to strip away all the inner
12968 parts of the memory reference until it reaches the base object. */
12969
12970 tree
12971 get_base_address (tree t)
12972 {
12973 while (handled_component_p (t))
12974 t = TREE_OPERAND (t, 0);
12975
12976 if ((TREE_CODE (t) == MEM_REF
12977 || TREE_CODE (t) == TARGET_MEM_REF)
12978 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12979 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12980
12981 /* ??? Either the alias oracle or all callers need to properly deal
12982 with WITH_SIZE_EXPRs before we can look through those. */
12983 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12984 return NULL_TREE;
12985
12986 return t;
12987 }
12988
12989 /* Return a tree of sizetype representing the size, in bytes, of the element
12990 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12991
12992 tree
12993 array_ref_element_size (tree exp)
12994 {
12995 tree aligned_size = TREE_OPERAND (exp, 3);
12996 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12997 location_t loc = EXPR_LOCATION (exp);
12998
12999 /* If a size was specified in the ARRAY_REF, it's the size measured
13000 in alignment units of the element type. So multiply by that value. */
13001 if (aligned_size)
13002 {
13003 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13004 sizetype from another type of the same width and signedness. */
13005 if (TREE_TYPE (aligned_size) != sizetype)
13006 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13007 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13008 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13009 }
13010
13011 /* Otherwise, take the size from that of the element type. Substitute
13012 any PLACEHOLDER_EXPR that we have. */
13013 else
13014 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13015 }
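
/* Worked example (illustrative numbers only): if operand 3 of the ARRAY_REF
   records an aligned size of 3 and the element type has a TYPE_ALIGN_UNIT of
   4 bytes, the returned size is 3 * 4 = 12 bytes.  Without operand 3 the
   element type's TYPE_SIZE_UNIT is used directly.  */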
13016
13017 /* Return a tree representing the lower bound of the array mentioned in
13018 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13019
13020 tree
13021 array_ref_low_bound (tree exp)
13022 {
13023 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13024
13025 /* If a lower bound is specified in EXP, use it. */
13026 if (TREE_OPERAND (exp, 2))
13027 return TREE_OPERAND (exp, 2);
13028
13029 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13030 substituting for a PLACEHOLDER_EXPR as needed. */
13031 if (domain_type && TYPE_MIN_VALUE (domain_type))
13032 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13033
13034 /* Otherwise, return a zero of the appropriate type. */
13035 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13036 }
13037
13038 /* Return a tree representing the upper bound of the array mentioned in
13039 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13040
13041 tree
13042 array_ref_up_bound (tree exp)
13043 {
13044 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13045
13046 /* If there is a domain type and it has an upper bound, use it, substituting
13047 for a PLACEHOLDER_EXPR as needed. */
13048 if (domain_type && TYPE_MAX_VALUE (domain_type))
13049 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13050
13051 /* Otherwise fail. */
13052 return NULL_TREE;
13053 }
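
/* For illustration: for a C array declared as "int a[10]" the domain is
   [0, 9], so array_ref_low_bound yields 0 and array_ref_up_bound yields 9.
   A Fortran or Ada array declared with bounds 1..10 instead yields 1 and 10,
   and for a reference to an incomplete array such as "extern int a[]" the
   domain carries no upper bound and array_ref_up_bound returns NULL_TREE.  */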
13054
13055 /* Returns true if REF is an array reference to an array at the end of
13056 a structure. If this is the case, the array may be allocated larger
13057 than its upper bound implies. */
13058
13059 bool
13060 array_at_struct_end_p (tree ref)
13061 {
13062 if (TREE_CODE (ref) != ARRAY_REF
13063 && TREE_CODE (ref) != ARRAY_RANGE_REF)
13064 return false;
13065
13066 while (handled_component_p (ref))
13067 {
13068 /* If the reference chain contains a component reference to a
13069 non-union type and another field follows, the reference
13070 is not at the end of a structure. */
13071 if (TREE_CODE (ref) == COMPONENT_REF
13072 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13073 {
13074 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13075 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13076 nextf = DECL_CHAIN (nextf);
13077 if (nextf)
13078 return false;
13079 }
13080
13081 ref = TREE_OPERAND (ref, 0);
13082 }
13083
13084 tree size = NULL;
13085
13086 if (TREE_CODE (ref) == MEM_REF
13087 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13088 {
13089 size = TYPE_SIZE (TREE_TYPE (ref));
13090 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13091 }
13092
13093 /* If the reference is based on a declared entity, the size of the array
13094 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
13095 if (DECL_P (ref)
13096 /* Be sure the size of the MEM_REF target matches. For example:
13097 
13098 char buf[10];
13099 struct foo *str = (struct foo *)&buf;
13100 
13101 str->trailing_array[2] = 1;
13102 
13103 is valid because BUF allocates enough space. */
13104
13105 && (!size || (DECL_SIZE (ref) != NULL
13106 && operand_equal_p (DECL_SIZE (ref), size, 0)))
13107 && !(flag_unconstrained_commons
13108 && TREE_CODE (ref) == VAR_DECL && DECL_COMMON (ref)))
13109 return false;
13110
13111 return true;
13112 }
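
/* For illustration, assuming the hypothetical declarations

     struct A { int n; char tail[1]; } *a;   (the classic "struct hack")
     struct B { char buf[4]; int m; } *b;

   a reference such as a->tail[i] is at the end of the structure, since no
   FIELD_DECL follows 'tail', so the array may have been allocated larger
   than its declared bound.  A reference to b->buf[i] is not, since the
   field 'm' follows 'buf' in the TYPE_FIELDS chain.  */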
13113
13114 /* Return a tree representing the offset, in bytes, of the field referenced
13115 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13116
13117 tree
13118 component_ref_field_offset (tree exp)
13119 {
13120 tree aligned_offset = TREE_OPERAND (exp, 2);
13121 tree field = TREE_OPERAND (exp, 1);
13122 location_t loc = EXPR_LOCATION (exp);
13123
13124 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13125 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13126 value. */
13127 if (aligned_offset)
13128 {
13129 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13130 sizetype from another type of the same width and signedness. */
13131 if (TREE_TYPE (aligned_offset) != sizetype)
13132 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13133 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13134 size_int (DECL_OFFSET_ALIGN (field)
13135 / BITS_PER_UNIT));
13136 }
13137
13138 /* Otherwise, take the offset from that of the field. Substitute
13139 any PLACEHOLDER_EXPR that we have. */
13140 else
13141 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13142 }
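
/* Worked example (illustrative numbers only): with BITS_PER_UNIT of 8 and a
   DECL_OFFSET_ALIGN of 64 bits, the multiplier is 64 / 8 = 8 bytes, so an
   aligned offset of 2 in operand 2 yields a byte offset of 16.  Without
   operand 2 the field's DECL_FIELD_OFFSET is used directly.  */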
13143
13144 /* Return the machine mode of T. For vectors, returns the mode of the
13145 inner type. The main use case is to feed the result to HONOR_NANS,
13146 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13147
13148 machine_mode
13149 element_mode (const_tree t)
13150 {
13151 if (!TYPE_P (t))
13152 t = TREE_TYPE (t);
13153 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13154 t = TREE_TYPE (t);
13155 return TYPE_MODE (t);
13156 }
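
/* For illustration, on a typical target: for a plain "double" both TYPE_MODE
   and element_mode return DFmode, while for "_Complex double" or a vector of
   doubles TYPE_MODE may be a complex, vector or even BLK mode, yet
   element_mode still returns the element's DFmode, which is what HONOR_NANS
   and friends expect.  */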
13157
13158
13159 /* Verify that the basic properties of T match TV and thus T can be a variant
13160 of TV. TV should be the more specified variant (i.e. the main variant). */
13161
13162 static bool
13163 verify_type_variant (const_tree t, tree tv)
13164 {
13165 /* A type variant can differ from its main variant by:
13166 
13167 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13168 ENCODE_QUAL_ADDR_SPACE.
13169 - the main variant may be TYPE_COMPLETE_P while variant types are
13170 !TYPE_COMPLETE_P; in this case some values may not be set in the
13171 variant types (see the TYPE_COMPLETE_P checks).
13172 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13173 - TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13174 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13175 - the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13176 - during LTO, TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
13177 this is necessary to make it possible to merge types from different TUs
13178 - arrays, pointers and references may have a TREE_TYPE that is a variant
13179 of the TREE_TYPE of their main variants.
13180 - aggregates may have a new TYPE_FIELDS list that lists variants of
13181 the main variant's TYPE_FIELDS.
13182 - vector types may differ by TYPE_VECTOR_OPAQUE
13183 - TYPE_METHODS is always NULL for variant types and is maintained for
13184 the main variant only.
13185 */
13186
13187 /* Convenience macro for matching individual fields. */
13188 #define verify_variant_match(flag) \
13189 do { \
13190 if (flag (tv) != flag (t)) \
13191 { \
13192 error ("type variant differs by " #flag "."); \
13193 debug_tree (tv); \
13194 return false; \
13195 } \
13196 } while (false)
13197
13198 /* tree_base checks. */
13199
13200 verify_variant_match (TREE_CODE);
13201 /* FIXME: Ada builds non-artificial variants of artificial types. */
13202 if (TYPE_ARTIFICIAL (tv) && 0)
13203 verify_variant_match (TYPE_ARTIFICIAL);
13204 if (POINTER_TYPE_P (tv))
13205 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13206 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13207 verify_variant_match (TYPE_UNSIGNED);
13208 verify_variant_match (TYPE_ALIGN_OK);
13209 verify_variant_match (TYPE_PACKED);
13210 if (TREE_CODE (t) == REFERENCE_TYPE)
13211 verify_variant_match (TYPE_REF_IS_RVALUE);
13212 if (AGGREGATE_TYPE_P (t))
13213 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13214 else
13215 verify_variant_match (TYPE_SATURATING);
13216 /* FIXME: This check triggers during the libstdc++ build. */
13217 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13218 verify_variant_match (TYPE_FINAL_P);
13219
13220 /* tree_type_common checks. */
13221
13222 if (COMPLETE_TYPE_P (t))
13223 {
13224 verify_variant_match (TYPE_SIZE);
13225 verify_variant_match (TYPE_MODE);
13226 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13227 /* FIXME: ideally we should compare pointer equality, but the Java FE
13228 produces variants where the size is an INTEGER_CST of a different type
13229 (int wrt size_type) during the libjava build. */
13230 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13231 {
13232 error ("type variant has different TYPE_SIZE_UNIT");
13233 debug_tree (tv);
13234 error ("type variant's TYPE_SIZE_UNIT");
13235 debug_tree (TYPE_SIZE_UNIT (tv));
13236 error ("type's TYPE_SIZE_UNIT");
13237 debug_tree (TYPE_SIZE_UNIT (t));
13238 return false;
13239 }
13240 }
13241 verify_variant_match (TYPE_PRECISION);
13242 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13243 if (RECORD_OR_UNION_TYPE_P (t))
13244 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13245 else if (TREE_CODE (t) == ARRAY_TYPE)
13246 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13247 /* During LTO we merge variant lists from different translation units
13248 that may differ by TYPE_CONTEXT, which in turn may point
13249 to a TRANSLATION_UNIT_DECL.
13250 Ada also builds variants of types with different TYPE_CONTEXT. */
13251 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13252 verify_variant_match (TYPE_CONTEXT);
13253 verify_variant_match (TYPE_STRING_FLAG);
13254 if (TYPE_ALIAS_SET_KNOWN_P (t))
13255 {
13256 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13257 debug_tree (tv);
13258 return false;
13259 }
13260
13261 /* tree_type_non_common checks. */
13262
13263 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13264 and dangles the pointer from time to time. */
13265 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13266 && (in_lto_p || !TYPE_VFIELD (tv)
13267 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13268 {
13269 error ("type variant has different TYPE_VFIELD");
13270 debug_tree (tv);
13271 return false;
13272 }
13273 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13274 || TREE_CODE (t) == INTEGER_TYPE
13275 || TREE_CODE (t) == BOOLEAN_TYPE
13276 || TREE_CODE (t) == REAL_TYPE
13277 || TREE_CODE (t) == FIXED_POINT_TYPE)
13278 {
13279 verify_variant_match (TYPE_MAX_VALUE);
13280 verify_variant_match (TYPE_MIN_VALUE);
13281 }
13282 if (TREE_CODE (t) == METHOD_TYPE)
13283 verify_variant_match (TYPE_METHOD_BASETYPE);
13284 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13285 {
13286 error ("type variant has TYPE_METHODS");
13287 debug_tree (tv);
13288 return false;
13289 }
13290 if (TREE_CODE (t) == OFFSET_TYPE)
13291 verify_variant_match (TYPE_OFFSET_BASETYPE);
13292 if (TREE_CODE (t) == ARRAY_TYPE)
13293 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13294 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13295 or even in the type's main variant. This is needed to make bootstrap pass
13296 and the bug seems new in GCC 5.
13297 The C++ FE should be updated to make this consistent and we should check
13298 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13299 is a match with the main variant.
13300 
13301 Also disable the check for Java for now because of a parser hack that first
13302 builds a dummy BINFO and then sometimes replaces it by the real BINFO in
13303 some of the copies. */
13304 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13305 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13306 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13307 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
13308 do the checking at LTO time only. */
13309 && (in_lto_p && odr_type_p (t)))
13310 {
13311 error ("type variant has different TYPE_BINFO");
13312 debug_tree (tv);
13313 error ("type variant's TYPE_BINFO");
13314 debug_tree (TYPE_BINFO (tv));
13315 error ("type's TYPE_BINFO");
13316 debug_tree (TYPE_BINFO (t));
13317 return false;
13318 }
13319
13320 /* Check various uses of TYPE_VALUES_RAW. */
13321 if (TREE_CODE (t) == ENUMERAL_TYPE)
13322 verify_variant_match (TYPE_VALUES);
13323 else if (TREE_CODE (t) == ARRAY_TYPE)
13324 verify_variant_match (TYPE_DOMAIN);
13325 /* Permit incomplete variants of complete type. While FEs may complete
13326 all variants, this does not happen for C++ templates in all cases. */
13327 else if (RECORD_OR_UNION_TYPE_P (t)
13328 && COMPLETE_TYPE_P (t)
13329 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13330 {
13331 tree f1, f2;
13332
13333 /* Fortran builds qualified variants as new records with items of
13334 qualified type. Verify that they look the same. */
13335 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13336 f1 && f2;
13337 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13338 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13339 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13340 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13341 /* FIXME: gfc_nonrestricted_type builds all types as variants
13342 with the exception of pointer types. It deeply copies the type,
13343 which means that we may end up with a variant type
13344 referring to a non-variant pointer. We may change it to
13345 produce types as variants, too, like
13346 objc_get_protocol_qualified_type does. */
13347 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13348 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13349 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13350 break;
13351 if (f1 || f2)
13352 {
13353 error ("type variant has different TYPE_FIELDS");
13354 debug_tree (tv);
13355 error ("first mismatch is field");
13356 debug_tree (f1);
13357 error ("and field");
13358 debug_tree (f2);
13359 return false;
13360 }
13361 }
13362 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13363 verify_variant_match (TYPE_ARG_TYPES);
13364 /* For C++ the qualified variant of an array type is really an array type
13365 of the qualified TREE_TYPE.
13366 ObjC builds variants of pointer types where the pointed-to type is a
13367 variant, too, in objc_get_protocol_qualified_type. */
13368 if (TREE_TYPE (t) != TREE_TYPE (tv)
13369 && ((TREE_CODE (t) != ARRAY_TYPE
13370 && !POINTER_TYPE_P (t))
13371 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13372 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13373 {
13374 error ("type variant has different TREE_TYPE");
13375 debug_tree (tv);
13376 error ("type variant's TREE_TYPE");
13377 debug_tree (TREE_TYPE (tv));
13378 error ("type's TREE_TYPE");
13379 debug_tree (TREE_TYPE (t));
13380 return false;
13381 }
13382 if (type_with_alias_set_p (t)
13383 && !gimple_canonical_types_compatible_p (t, tv, false))
13384 {
13385 error ("type is not compatible with its vairant");
13386 debug_tree (tv);
13387 error ("type variant's TREE_TYPE");
13388 debug_tree (TREE_TYPE (tv));
13389 error ("type's TREE_TYPE");
13390 debug_tree (TREE_TYPE (t));
13391 return false;
13392 }
13393 return true;
13394 #undef verify_variant_match
13395 }
13396
13397
13398 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13399 the middle-end types_compatible_p function. It needs to avoid
13400 claiming types are different for types that should be treated
13401 the same with respect to TBAA. Canonical types are also used
13402 for IL consistency checks via the useless_type_conversion_p
13403 predicate which does not handle all type kinds itself but falls
13404 back to pointer-comparison of TYPE_CANONICAL for aggregates
13405 for example. */
13406
13407 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13408 type calculation because we need to allow inter-operability between signed
13409 and unsigned variants. */
13410
13411 bool
13412 type_with_interoperable_signedness (const_tree type)
13413 {
13414 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13415 signed char and unsigned char. Similarly the Fortran FE builds
13416 C_SIZE_T as a signed type, while C defines it as unsigned. */
13417
13418 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13419 == INTEGER_TYPE
13420 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13421 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13422 }
13423
13424 /* Return true iff T1 and T2 are structurally identical as far as
13425 TBAA is concerned.
13426 This function is used both by lto.c canonical type merging and by the
13427 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of types
13428 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13429 only for LTO because only then does TYPE_CANONICAL equivalence
13430 correspond to the one defined by gimple_canonical_types_compatible_p. */
13431
13432 bool
13433 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13434 bool trust_type_canonical)
13435 {
13436 /* Type variants should be the same as the main variant. When not doing
13437 sanity checking to verify this fact, go to main variants and save some work. */
13438 if (trust_type_canonical)
13439 {
13440 t1 = TYPE_MAIN_VARIANT (t1);
13441 t2 = TYPE_MAIN_VARIANT (t2);
13442 }
13443
13444 /* Check first for the obvious case of pointer identity. */
13445 if (t1 == t2)
13446 return true;
13447
13448 /* Check that we have two types to compare. */
13449 if (t1 == NULL_TREE || t2 == NULL_TREE)
13450 return false;
13451
13452 /* We consider complete types always compatible with incomplete types.
13453 This does not make sense for canonical type calculation and thus we
13454 need to ensure that we are never called on one.
13455 
13456 FIXME: For more correctness the function probably should have three modes:
13457 1) a mode assuming that types are complete and matching their structure
13458 2) a mode allowing incomplete types but producing equivalence classes
13459 and thus ignoring all info from complete types
13460 3) a mode allowing incomplete types to match complete ones but checking
13461 compatibility between complete types.
13462 
13463 1 and 2 can be used for canonical type calculation. 3 is the real
13464 definition of type compatibility that can be used e.g. for warnings during
13465 declaration merging. */
13466
13467 gcc_assert (!trust_type_canonical
13468 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13469 /* If the types have been previously registered and found equal
13470 they still are. */
13471
13472 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13473 && trust_type_canonical)
13474 {
13475 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13476 they are always NULL, but they are set to non-NULL for types
13477 constructed by build_pointer_type and variants. In this case the
13478 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13479 all pointers are considered equal). Be sure not to return false
13480 negatives. */
13481 gcc_checking_assert (canonical_type_used_p (t1)
13482 && canonical_type_used_p (t2));
13483 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13484 }
13485
13486 /* Can't be the same type if the types don't have the same code. */
13487 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13488 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13489 return false;
13490
13491 /* Qualifiers do not matter for canonical type comparison purposes. */
13492
13493 /* Void types and nullptr types are always the same. */
13494 if (TREE_CODE (t1) == VOID_TYPE
13495 || TREE_CODE (t1) == NULLPTR_TYPE)
13496 return true;
13497
13498 /* Can't be the same type if they have different mode. */
13499 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13500 return false;
13501
13502 /* Non-aggregate types can be handled cheaply. */
13503 if (INTEGRAL_TYPE_P (t1)
13504 || SCALAR_FLOAT_TYPE_P (t1)
13505 || FIXED_POINT_TYPE_P (t1)
13506 || TREE_CODE (t1) == VECTOR_TYPE
13507 || TREE_CODE (t1) == COMPLEX_TYPE
13508 || TREE_CODE (t1) == OFFSET_TYPE
13509 || POINTER_TYPE_P (t1))
13510 {
13511 /* Can't be the same type if they have different precision. */
13512 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13513 return false;
13514
13515 /* In some cases the signed and unsigned types are required to be
13516 inter-operable. */
13517 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13518 && !type_with_interoperable_signedness (t1))
13519 return false;
13520
13521 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13522 interoperable with "signed char". Unless all frontends are revisited
13523 to agree on these types, we must ignore the flag completely. */
13524
13525 /* The Fortran standard defines the C_PTR type as compatible with every
13526 C pointer. For this reason we need to glob all pointers into one.
13527 Still, pointers in different address spaces are not compatible. */
13528 if (POINTER_TYPE_P (t1))
13529 {
13530 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13531 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13532 return false;
13533 }
13534
13535 /* Tail-recurse to components. */
13536 if (TREE_CODE (t1) == VECTOR_TYPE
13537 || TREE_CODE (t1) == COMPLEX_TYPE)
13538 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13539 TREE_TYPE (t2),
13540 trust_type_canonical);
13541
13542 return true;
13543 }
13544
13545 /* Do type-specific comparisons. */
13546 switch (TREE_CODE (t1))
13547 {
13548 case ARRAY_TYPE:
13549 /* Array types are the same if the element types are the same and
13550 the number of elements is the same. */
13551 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13552 trust_type_canonical)
13553 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13554 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13555 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13556 return false;
13557 else
13558 {
13559 tree i1 = TYPE_DOMAIN (t1);
13560 tree i2 = TYPE_DOMAIN (t2);
13561
13562 /* For an incomplete external array, the type domain can be
13563 NULL_TREE. Check this condition also. */
13564 if (i1 == NULL_TREE && i2 == NULL_TREE)
13565 return true;
13566 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13567 return false;
13568 else
13569 {
13570 tree min1 = TYPE_MIN_VALUE (i1);
13571 tree min2 = TYPE_MIN_VALUE (i2);
13572 tree max1 = TYPE_MAX_VALUE (i1);
13573 tree max2 = TYPE_MAX_VALUE (i2);
13574
13575 /* The minimum/maximum values have to be the same. */
13576 if ((min1 == min2
13577 || (min1 && min2
13578 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13579 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13580 || operand_equal_p (min1, min2, 0))))
13581 && (max1 == max2
13582 || (max1 && max2
13583 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13584 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13585 || operand_equal_p (max1, max2, 0)))))
13586 return true;
13587 else
13588 return false;
13589 }
13590 }
13591
13592 case METHOD_TYPE:
13593 case FUNCTION_TYPE:
13594 /* Function types are the same if the return type and argument types
13595 are the same. */
13596 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13597 trust_type_canonical))
13598 return false;
13599
13600 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13601 return true;
13602 else
13603 {
13604 tree parms1, parms2;
13605
13606 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13607 parms1 && parms2;
13608 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13609 {
13610 if (!gimple_canonical_types_compatible_p
13611 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13612 trust_type_canonical))
13613 return false;
13614 }
13615
13616 if (parms1 || parms2)
13617 return false;
13618
13619 return true;
13620 }
13621
13622 case RECORD_TYPE:
13623 case UNION_TYPE:
13624 case QUAL_UNION_TYPE:
13625 {
13626 tree f1, f2;
13627
13628 /* Don't try to compare variants of an incomplete type, before
13629 TYPE_FIELDS has been copied around. */
13630 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13631 return true;
13632
13633
13634 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13635 return false;
13636
13637 /* For aggregate types, all the fields must be the same. */
13638 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13639 f1 || f2;
13640 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13641 {
13642 /* Skip non-fields. */
13643 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13644 f1 = TREE_CHAIN (f1);
13645 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13646 f2 = TREE_CHAIN (f2);
13647 if (!f1 || !f2)
13648 break;
13649 /* The fields must have the same name, offset and type. */
13650 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13651 || !gimple_compare_field_offset (f1, f2)
13652 || !gimple_canonical_types_compatible_p
13653 (TREE_TYPE (f1), TREE_TYPE (f2),
13654 trust_type_canonical))
13655 return false;
13656 }
13657
13658 /* If one aggregate has more fields than the other, they
13659 are not the same. */
13660 if (f1 || f2)
13661 return false;
13662
13663 return true;
13664 }
13665
13666 default:
13667 /* Consider all types with language specific trees in them mutually
13668 compatible. This is executed only from verify_type and false
13669 positives can be tolerated. */
13670 gcc_assert (!in_lto_p);
13671 return true;
13672 }
13673 }
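
/* For illustration of the pointer handling above: because only the address
   space of the pointed-to type is compared, "int *" and "double *" are
   considered compatible for canonical-type (TBAA) purposes, whereas a
   pointer into a different named address space is not.  Likewise a signed
   and an unsigned integer type of the same precision only match when
   type_with_interoperable_signedness holds for them.  */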
13674
13675 /* Verify type T. */
13676
13677 void
13678 verify_type (const_tree t)
13679 {
13680 bool error_found = false;
13681 tree mv = TYPE_MAIN_VARIANT (t);
13682 if (!mv)
13683 {
13684 error ("Main variant is not defined");
13685 error_found = true;
13686 }
13687 else if (mv != TYPE_MAIN_VARIANT (mv))
13688 {
13689 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13690 debug_tree (mv);
13691 error_found = true;
13692 }
13693 else if (t != mv && !verify_type_variant (t, mv))
13694 error_found = true;
13695
13696 tree ct = TYPE_CANONICAL (t);
13697 if (!ct)
13698 ;
13699 else if (TYPE_CANONICAL (t) != ct)
13700 {
13701 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13702 debug_tree (ct);
13703 error_found = true;
13704 }
13705 /* Method and function types cannot be used to address memory and thus
13706 TYPE_CANONICAL really matters only for determining useless conversions.
13707 
13708 FIXME: The C++ FE produces declarations of builtin functions that are not
13709 compatible with main variants. */
13710 else if (TREE_CODE (t) == FUNCTION_TYPE)
13711 ;
13712 else if (t != ct
13713 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13714 with variably sized arrays because their sizes were possibly
13715 gimplified to different variables. */
13716 && !variably_modified_type_p (ct, NULL)
13717 && !gimple_canonical_types_compatible_p (t, ct, false))
13718 {
13719 error ("TYPE_CANONICAL is not compatible");
13720 debug_tree (ct);
13721 error_found = true;
13722 }
13723
13724 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13725 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13726 {
13727 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13728 debug_tree (ct);
13729 error_found = true;
13730 }
13731 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13732 FUNCTION_*_QUALIFIED flags are set. */
13733 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13734 {
13735 error ("TYPE_CANONICAL of main variant is not main variant");
13736 debug_tree (ct);
13737 debug_tree (TYPE_MAIN_VARIANT (ct));
13738 error_found = true;
13739 }
13740
13741
13742 /* Check various uses of TYPE_MINVAL. */
13743 if (RECORD_OR_UNION_TYPE_P (t))
13744 {
13745 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13746 and dangles the pointer from time to time. */
13747 if (TYPE_VFIELD (t)
13748 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13749 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13750 {
13751 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13752 debug_tree (TYPE_VFIELD (t));
13753 error_found = true;
13754 }
13755 }
13756 else if (TREE_CODE (t) == POINTER_TYPE)
13757 {
13758 if (TYPE_NEXT_PTR_TO (t)
13759 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13760 {
13761 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13762 debug_tree (TYPE_NEXT_PTR_TO (t));
13763 error_found = true;
13764 }
13765 }
13766 else if (TREE_CODE (t) == REFERENCE_TYPE)
13767 {
13768 if (TYPE_NEXT_REF_TO (t)
13769 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13770 {
13771 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13772 debug_tree (TYPE_NEXT_REF_TO (t));
13773 error_found = true;
13774 }
13775 }
13776 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13777 || TREE_CODE (t) == FIXED_POINT_TYPE)
13778 {
13779 /* FIXME: The following check should pass:
13780 useless_type_conversion_p (const_cast <tree> (t),
13781 TREE_TYPE (TYPE_MIN_VALUE (t))
13782 but does not for C sizetypes in LTO. */
13783 }
13784 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13785 else if (TYPE_MINVAL (t)
13786 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13787 || in_lto_p))
13788 {
13789 error ("TYPE_MINVAL non-NULL");
13790 debug_tree (TYPE_MINVAL (t));
13791 error_found = true;
13792 }
13793
13794 /* Check various uses of TYPE_MAXVAL. */
13795 if (RECORD_OR_UNION_TYPE_P (t))
13796 {
13797 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13798 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13799 && TYPE_METHODS (t) != error_mark_node)
13800 {
13801 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13802 debug_tree (TYPE_METHODS (t));
13803 error_found = true;
13804 }
13805 }
13806 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13807 {
13808 if (TYPE_METHOD_BASETYPE (t)
13809 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13810 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13811 {
13812 error ("TYPE_METHOD_BASETYPE is not record nor union");
13813 debug_tree (TYPE_METHOD_BASETYPE (t));
13814 error_found = true;
13815 }
13816 }
13817 else if (TREE_CODE (t) == OFFSET_TYPE)
13818 {
13819 if (TYPE_OFFSET_BASETYPE (t)
13820 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13821 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13822 {
13823 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13824 debug_tree (TYPE_OFFSET_BASETYPE (t));
13825 error_found = true;
13826 }
13827 }
13828 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13829 || TREE_CODE (t) == FIXED_POINT_TYPE)
13830 {
13831 /* FIXME: The following check should pass:
13832 useless_type_conversion_p (const_cast <tree> (t),
13833 TREE_TYPE (TYPE_MAX_VALUE (t))
13834 but does not for C sizetypes in LTO. */
13835 }
13836 else if (TREE_CODE (t) == ARRAY_TYPE)
13837 {
13838 if (TYPE_ARRAY_MAX_SIZE (t)
13839 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13840 {
13841 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13842 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13843 error_found = true;
13844 }
13845 }
13846 else if (TYPE_MAXVAL (t))
13847 {
13848 error ("TYPE_MAXVAL non-NULL");
13849 debug_tree (TYPE_MAXVAL (t));
13850 error_found = true;
13851 }
13852
13853 /* Check various uses of TYPE_BINFO. */
13854 if (RECORD_OR_UNION_TYPE_P (t))
13855 {
13856 if (!TYPE_BINFO (t))
13857 ;
13858 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13859 {
13860 error ("TYPE_BINFO is not TREE_BINFO");
13861 debug_tree (TYPE_BINFO (t));
13862 error_found = true;
13863 }
13864 /* FIXME: Java builds invalid empty binfos that do not have
13865 TREE_TYPE set. */
13866 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13867 {
13868 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13869 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13870 error_found = true;
13871 }
13872 }
13873 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13874 {
13875 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13876 debug_tree (TYPE_LANG_SLOT_1 (t));
13877 error_found = true;
13878 }
13879
13880 /* Check various uses of TYPE_VALUES_RAW. */
13881 if (TREE_CODE (t) == ENUMERAL_TYPE)
13882 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13883 {
13884 tree value = TREE_VALUE (l);
13885 tree name = TREE_PURPOSE (l);
13886
13887 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13888 a CONST_DECL of ENUMERAL_TYPE. */
13889 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13890 {
13891 error ("Enum value is not CONST_DECL or INTEGER_CST");
13892 debug_tree (value);
13893 debug_tree (name);
13894 error_found = true;
13895 }
13896 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13897 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13898 {
13899 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13900 debug_tree (value);
13901 debug_tree (name);
13902 error_found = true;
13903 }
13904 if (TREE_CODE (name) != IDENTIFIER_NODE)
13905 {
13906 error ("Enum value name is not IDENTIFIER_NODE");
13907 debug_tree (value);
13908 debug_tree (name);
13909 error_found = true;
13910 }
13911 }
13912 else if (TREE_CODE (t) == ARRAY_TYPE)
13913 {
13914 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13915 {
13916 error ("Array TYPE_DOMAIN is not integer type");
13917 debug_tree (TYPE_DOMAIN (t));
13918 error_found = true;
13919 }
13920 }
13921 else if (RECORD_OR_UNION_TYPE_P (t))
13922 {
13923 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13924 {
13925 error ("TYPE_FIELDS defined in incomplete type");
13926 error_found = true;
13927 }
13928 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13929 {
13930 /* TODO: verify properties of decls. */
13931 if (TREE_CODE (fld) == FIELD_DECL)
13932 ;
13933 else if (TREE_CODE (fld) == TYPE_DECL)
13934 ;
13935 else if (TREE_CODE (fld) == CONST_DECL)
13936 ;
13937 else if (TREE_CODE (fld) == VAR_DECL)
13938 ;
13939 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13940 ;
13941 else if (TREE_CODE (fld) == USING_DECL)
13942 ;
13943 else
13944 {
13945 error ("Wrong tree in TYPE_FIELDS list");
13946 debug_tree (fld);
13947 error_found = true;
13948 }
13949 }
13950 }
13951 else if (TREE_CODE (t) == INTEGER_TYPE
13952 || TREE_CODE (t) == BOOLEAN_TYPE
13953 || TREE_CODE (t) == OFFSET_TYPE
13954 || TREE_CODE (t) == REFERENCE_TYPE
13955 || TREE_CODE (t) == NULLPTR_TYPE
13956 || TREE_CODE (t) == POINTER_TYPE)
13957 {
13958 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13959 {
13960 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13961 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13962 error_found = true;
13963 }
13964 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13965 {
13966 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13967 debug_tree (TYPE_CACHED_VALUES (t));
13968 error_found = true;
13969 }
13970 /* Verify just enough of the cache to ensure that no one copied it to a new
13971 type. All copying should go through copy_node, which should clear it. */
13972 else if (TYPE_CACHED_VALUES_P (t))
13973 {
13974 int i;
13975 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13976 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13977 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13978 {
13979 error ("wrong TYPE_CACHED_VALUES entry");
13980 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13981 error_found = true;
13982 break;
13983 }
13984 }
13985 }
13986 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13987 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13988 {
13989 /* C++ FE uses TREE_PURPOSE to store initial values. */
13990 if (TREE_PURPOSE (l) && in_lto_p)
13991 {
13992 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13993 debug_tree (l);
13994 error_found = true;
13995 }
13996 if (!TYPE_P (TREE_VALUE (l)))
13997 {
13998 error ("Wrong entry in TYPE_ARG_TYPES list");
13999 debug_tree (l);
14000 error_found = true;
14001 }
14002 }
14003 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14004 {
14005 error ("TYPE_VALUES_RAW field is non-NULL");
14006 debug_tree (TYPE_VALUES_RAW (t));
14007 error_found = true;
14008 }
14009 if (TREE_CODE (t) != INTEGER_TYPE
14010 && TREE_CODE (t) != BOOLEAN_TYPE
14011 && TREE_CODE (t) != OFFSET_TYPE
14012 && TREE_CODE (t) != REFERENCE_TYPE
14013 && TREE_CODE (t) != NULLPTR_TYPE
14014 && TREE_CODE (t) != POINTER_TYPE
14015 && TYPE_CACHED_VALUES_P (t))
14016 {
14017 error ("TYPE_CACHED_VALUES_P is set while it should not");
14018 error_found = true;
14019 }
14020 if (TYPE_STRING_FLAG (t)
14021 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
14022 {
14023 error ("TYPE_STRING_FLAG is set on wrong type code");
14024 error_found = true;
14025 }
14026 else if (TYPE_STRING_FLAG (t))
14027 {
14028 const_tree b = t;
14029 if (TREE_CODE (b) == ARRAY_TYPE)
14030 b = TREE_TYPE (t);
14031 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
14032 which is 32 bits. */
14033 if (TREE_CODE (b) != INTEGER_TYPE)
14034 {
14035 error ("TYPE_STRING_FLAG is set on type that does not look like "
14036 "char nor array of chars");
14037 error_found = true;
14038 }
14039 }
14040
14041 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
14042 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14043 of a type. */
14044 if (TREE_CODE (t) == METHOD_TYPE
14045 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14046 {
14047 error ("TYPE_METHOD_BASETYPE is not main variant");
14048 error_found = true;
14049 }
14050
14051 if (error_found)
14052 {
14053 debug_tree (const_cast <tree> (t));
14054 internal_error ("verify_type failed");
14055 }
14056 }
14057
14058
14059 /* Return true if ARG is marked with the nonnull attribute in the
14060 current function signature. */
14061
14062 bool
14063 nonnull_arg_p (const_tree arg)
14064 {
14065 tree t, attrs, fntype;
14066 unsigned HOST_WIDE_INT arg_num;
14067
14068 gcc_assert (TREE_CODE (arg) == PARM_DECL
14069 && (POINTER_TYPE_P (TREE_TYPE (arg))
14070 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14071
14072 /* The static chain decl is always non-NULL. */
14073 if (arg == cfun->static_chain_decl)
14074 return true;
14075
14076 /* THIS argument of method is always non-NULL. */
14077 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14078 && arg == DECL_ARGUMENTS (cfun->decl)
14079 && flag_delete_null_pointer_checks)
14080 return true;
14081
14082 /* Values passed by reference are always non-NULL. */
14083 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14084 && flag_delete_null_pointer_checks)
14085 return true;
14086
14087 fntype = TREE_TYPE (cfun->decl);
14088 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14089 {
14090 attrs = lookup_attribute ("nonnull", attrs);
14091
14092 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14093 if (attrs == NULL_TREE)
14094 return false;
14095
14096 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14097 if (TREE_VALUE (attrs) == NULL_TREE)
14098 return true;
14099
14100 /* Get the position number for ARG in the function signature. */
14101 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14102 t;
14103 t = DECL_CHAIN (t), arg_num++)
14104 {
14105 if (t == arg)
14106 break;
14107 }
14108
14109 gcc_assert (t == arg);
14110
14111 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14112 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14113 {
14114 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14115 return true;
14116 }
14117 }
14118
14119 return false;
14120 }
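
/* For illustration, assuming the current function was declared as

     void f (char *a, char *b) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns true for the PARM_DECL of 'b' and false for 'a';
   with a bare "nonnull" attribute (no argument list) it returns true for
   every pointer parameter.  Independently of the attribute, the static
   chain decl is always treated as non-NULL, and the 'this' pointer of a
   method and reference parameters are when -fdelete-null-pointer-checks
   is in effect.  */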
14121
14122 /* Given location LOC, strip away any packed range information
14123 or ad-hoc information. */
14124
14125 location_t
14126 get_pure_location (location_t loc)
14127 {
14128 if (IS_ADHOC_LOC (loc))
14129 loc
14130 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
14131
14132 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
14133 return loc;
14134
14135 if (loc < RESERVED_LOCATION_COUNT)
14136 return loc;
14137
14138 const line_map *map = linemap_lookup (line_table, loc);
14139 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
14140
14141 return loc & ~((1 << ordmap->m_range_bits) - 1);
14142 }
14143
14144 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14145 information. */
14146
14147 location_t
14148 set_block (location_t loc, tree block)
14149 {
14150 location_t pure_loc = get_pure_location (loc);
14151 source_range src_range = get_range_from_loc (line_table, loc);
14152 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14153 }
14154
14155 location_t
14156 set_source_range (tree expr, location_t start, location_t finish)
14157 {
14158 source_range src_range;
14159 src_range.m_start = start;
14160 src_range.m_finish = finish;
14161 return set_source_range (expr, src_range);
14162 }
14163
14164 location_t
14165 set_source_range (tree expr, source_range src_range)
14166 {
14167 if (!EXPR_P (expr))
14168 return UNKNOWN_LOCATION;
14169
14170 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14171 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14172 pure_loc,
14173 src_range,
14174 NULL);
14175 SET_EXPR_LOCATION (expr, adhoc);
14176 return adhoc;
14177 }
14178
14179 location_t
14180 make_location (location_t caret, location_t start, location_t finish)
14181 {
14182 location_t pure_loc = get_pure_location (caret);
14183 source_range src_range;
14184 src_range.m_start = start;
14185 src_range.m_finish = finish;
14186 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
14187 pure_loc,
14188 src_range,
14189 NULL);
14190 return combined_loc;
14191 }
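
/* For illustration (a hypothetical front-end use): when building the
   location of a binary expression such as "a + b", a front end may call

     loc = make_location (operator_loc, start_of_a, end_of_b);

   so that diagnostics place the caret on the operator while underlining
   the whole expression from the start of 'a' to the end of 'b'.  */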
14192
14193 /* Return the name of combined function FN, for debugging purposes. */
14194
14195 const char *
14196 combined_fn_name (combined_fn fn)
14197 {
14198 if (builtin_fn_p (fn))
14199 {
14200 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14201 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14202 }
14203 else
14204 return internal_fn_name (as_internal_fn (fn));
14205 }
14206
14207 #if CHECKING_P
14208
14209 namespace selftest {
14210
14211 /* Selftests for tree. */
14212
14213 /* Verify that integer constants are sane. */
14214
14215 static void
14216 test_integer_constants ()
14217 {
14218 ASSERT_TRUE (integer_type_node != NULL);
14219 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14220
14221 tree type = integer_type_node;
14222
14223 tree zero = build_zero_cst (type);
14224 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14225 ASSERT_EQ (type, TREE_TYPE (zero));
14226
14227 tree one = build_int_cst (type, 1);
14228 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14229 ASSERT_EQ (type, TREE_TYPE (zero));
14230 }
14231
14232 /* Verify identifiers. */
14233
14234 static void
14235 test_identifiers ()
14236 {
14237 tree identifier = get_identifier ("foo");
14238 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14239 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14240 }
14241
14242 /* Verify LABEL_DECL. */
14243
14244 static void
14245 test_labels ()
14246 {
14247 tree identifier = get_identifier ("err");
14248 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14249 identifier, void_type_node);
14250 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14251 ASSERT_FALSE (FORCED_LABEL (label_decl));
14252 }
14253
14254 /* Run all of the selftests within this file. */
14255
14256 void
14257 tree_c_tests ()
14258 {
14259 test_integer_constants ();
14260 test_identifiers ();
14261 test_labels ();
14262 }
14263
14264 } // namespace selftest
14265
14266 #endif /* CHECKING_P */
14267
14268 #include "gt-tree.h"