1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65
66 /* Tree code classes. */
67
68 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
69 #define END_OF_BASE_TREE_CODES tcc_exceptional,
70
71 const enum tree_code_class tree_code_type[] = {
72 #include "all-tree.def"
73 };
74
75 #undef DEFTREECODE
76 #undef END_OF_BASE_TREE_CODES
77
78 /* Table indexed by tree code giving number of expression
79 operands beyond the fixed part of the node structure.
80 Not used for types or decls. */
81
82 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
83 #define END_OF_BASE_TREE_CODES 0,
84
85 const unsigned char tree_code_length[] = {
86 #include "all-tree.def"
87 };
88
89 #undef DEFTREECODE
90 #undef END_OF_BASE_TREE_CODES
91
92 /* Names of tree components.
93 Used for printing out the tree and error messages. */
94 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
95 #define END_OF_BASE_TREE_CODES "@dummy",
96
97 static const char *const tree_code_name[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Each tree code class has an associated string representation.
105 These must correspond to the tree_code_class entries. */
106
107 const char *const tree_code_class_strings[] =
108 {
109 "exceptional",
110 "constant",
111 "type",
112 "declaration",
113 "reference",
114 "comparison",
115 "unary",
116 "binary",
117 "statement",
118 "vl_exp",
119 "expression"
120 };
121
122 /* obstack.[ch] explicitly declined to prototype this. */
123 extern int _obstack_allocated_p (struct obstack *h, void *obj);
124
125 /* Statistics-gathering stuff. */
126
127 static int tree_code_counts[MAX_TREE_CODES];
128 int tree_node_counts[(int) all_kinds];
129 int tree_node_sizes[(int) all_kinds];
130
131 /* Keep in sync with tree.h:enum tree_node_kind. */
132 static const char * const tree_node_kind_names[] = {
133 "decls",
134 "types",
135 "blocks",
136 "stmts",
137 "refs",
138 "exprs",
139 "constants",
140 "identifiers",
141 "vecs",
142 "binfos",
143 "ssa names",
144 "constructors",
145 "random kinds",
146 "lang_decl kinds",
147 "lang_type kinds",
148 "omp clauses",
149 };
150
151 /* Unique id for next decl created. */
152 static GTY(()) int next_decl_uid;
153 /* Unique id for next type created. */
154 static GTY(()) int next_type_uid = 1;
155 /* Unique id for next debug decl created. Use negative numbers,
156 to catch erroneous uses. */
157 static GTY(()) int next_debug_decl_uid;
158
159 /* Since we cannot rehash a type after it is in the table, we have to
160 keep the hash code. */
161
162 struct GTY((for_user)) type_hash {
163 unsigned long hash;
164 tree type;
165 };
166
167 /* Initial size of the hash table (rounded to next prime). */
168 #define TYPE_HASH_INITIAL_SIZE 1000
169
170 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
171 {
172 static hashval_t hash (type_hash *t) { return t->hash; }
173 static bool equal (type_hash *a, type_hash *b);
174
175 static int
176 keep_cache_entry (type_hash *&t)
177 {
178 return ggc_marked_p (t->type);
179 }
180 };
181
182 /* Now here is the hash table. When recording a type, it is added to
183 the slot whose index is the hash code. Note that the hash table is
184 used for several kinds of types (function types, array types and
185 array index range types, for now). While all these live in the
186 same table, they are completely independent, and the hash code is
187 computed differently for each of these. */
188
189 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
190
191 /* Hash table and temporary node for larger integer const values. */
192 static GTY (()) tree int_cst_node;
193
194 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
195 {
196 static hashval_t hash (tree t);
197 static bool equal (tree x, tree y);
198 };
199
200 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
201
202 /* Hash table for optimization flags and target option flags. Use the same
203 hash table for both sets of options. Nodes for building the current
204 optimization and target option nodes. The assumption is most of the time
205 the options created will already be in the hash table, so we avoid
206 allocating and freeing up a node repeatedly. */
207 static GTY (()) tree cl_optimization_node;
208 static GTY (()) tree cl_target_option_node;
209
210 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
211 {
212 static hashval_t hash (tree t);
213 static bool equal (tree x, tree y);
214 };
215
216 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
217
218 /* General tree->tree mapping structure for use in hash tables. */
219
220
221 static GTY ((cache))
222 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
223
224 static GTY ((cache))
225 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
226
227 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
228 {
229 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
230
231 static bool
232 equal (tree_vec_map *a, tree_vec_map *b)
233 {
234 return a->base.from == b->base.from;
235 }
236
237 static int
238 keep_cache_entry (tree_vec_map *&m)
239 {
240 return ggc_marked_p (m->base.from);
241 }
242 };
243
244 static GTY ((cache))
245 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
246
247 static void set_type_quals (tree, int);
248 static void print_type_hash_statistics (void);
249 static void print_debug_expr_statistics (void);
250 static void print_value_expr_statistics (void);
251 static void type_hash_list (const_tree, inchash::hash &);
252 static void attribute_hash_list (const_tree, inchash::hash &);
253
254 tree global_trees[TI_MAX];
255 tree integer_types[itk_none];
256
257 bool int_n_enabled_p[NUM_INT_N_ENTS];
258 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
259
260 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
261
262 /* Number of operands for each OpenMP clause. */
263 unsigned const char omp_clause_num_ops[] =
264 {
265 0, /* OMP_CLAUSE_ERROR */
266 1, /* OMP_CLAUSE_PRIVATE */
267 1, /* OMP_CLAUSE_SHARED */
268 1, /* OMP_CLAUSE_FIRSTPRIVATE */
269 2, /* OMP_CLAUSE_LASTPRIVATE */
270 5, /* OMP_CLAUSE_REDUCTION */
271 1, /* OMP_CLAUSE_COPYIN */
272 1, /* OMP_CLAUSE_COPYPRIVATE */
273 3, /* OMP_CLAUSE_LINEAR */
274 2, /* OMP_CLAUSE_ALIGNED */
275 1, /* OMP_CLAUSE_DEPEND */
276 1, /* OMP_CLAUSE_UNIFORM */
277 1, /* OMP_CLAUSE_TO_DECLARE */
278 1, /* OMP_CLAUSE_LINK */
279 2, /* OMP_CLAUSE_FROM */
280 2, /* OMP_CLAUSE_TO */
281 2, /* OMP_CLAUSE_MAP */
282 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
283 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
284 2, /* OMP_CLAUSE__CACHE_ */
285 2, /* OMP_CLAUSE_GANG */
286 1, /* OMP_CLAUSE_ASYNC */
287 1, /* OMP_CLAUSE_WAIT */
288 0, /* OMP_CLAUSE_AUTO */
289 0, /* OMP_CLAUSE_SEQ */
290 1, /* OMP_CLAUSE__LOOPTEMP_ */
291 1, /* OMP_CLAUSE_IF */
292 1, /* OMP_CLAUSE_NUM_THREADS */
293 1, /* OMP_CLAUSE_SCHEDULE */
294 0, /* OMP_CLAUSE_NOWAIT */
295 1, /* OMP_CLAUSE_ORDERED */
296 0, /* OMP_CLAUSE_DEFAULT */
297 3, /* OMP_CLAUSE_COLLAPSE */
298 0, /* OMP_CLAUSE_UNTIED */
299 1, /* OMP_CLAUSE_FINAL */
300 0, /* OMP_CLAUSE_MERGEABLE */
301 1, /* OMP_CLAUSE_DEVICE */
302 1, /* OMP_CLAUSE_DIST_SCHEDULE */
303 0, /* OMP_CLAUSE_INBRANCH */
304 0, /* OMP_CLAUSE_NOTINBRANCH */
305 1, /* OMP_CLAUSE_NUM_TEAMS */
306 1, /* OMP_CLAUSE_THREAD_LIMIT */
307 0, /* OMP_CLAUSE_PROC_BIND */
308 1, /* OMP_CLAUSE_SAFELEN */
309 1, /* OMP_CLAUSE_SIMDLEN */
310 0, /* OMP_CLAUSE_FOR */
311 0, /* OMP_CLAUSE_PARALLEL */
312 0, /* OMP_CLAUSE_SECTIONS */
313 0, /* OMP_CLAUSE_TASKGROUP */
314 1, /* OMP_CLAUSE_PRIORITY */
315 1, /* OMP_CLAUSE_GRAINSIZE */
316 1, /* OMP_CLAUSE_NUM_TASKS */
317 0, /* OMP_CLAUSE_NOGROUP */
318 0, /* OMP_CLAUSE_THREADS */
319 0, /* OMP_CLAUSE_SIMD */
320 1, /* OMP_CLAUSE_HINT */
321 0, /* OMP_CLAUSE_DEFAULTMAP */
322 1, /* OMP_CLAUSE__SIMDUID_ */
323 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
324 0, /* OMP_CLAUSE_INDEPENDENT */
325 1, /* OMP_CLAUSE_WORKER */
326 1, /* OMP_CLAUSE_VECTOR */
327 1, /* OMP_CLAUSE_NUM_GANGS */
328 1, /* OMP_CLAUSE_NUM_WORKERS */
329 1, /* OMP_CLAUSE_VECTOR_LENGTH */
330 1, /* OMP_CLAUSE_TILE */
331 2, /* OMP_CLAUSE__GRIDDIM_ */
332 };
333
334 const char * const omp_clause_code_name[] =
335 {
336 "error_clause",
337 "private",
338 "shared",
339 "firstprivate",
340 "lastprivate",
341 "reduction",
342 "copyin",
343 "copyprivate",
344 "linear",
345 "aligned",
346 "depend",
347 "uniform",
348 "to",
349 "link",
350 "from",
351 "to",
352 "map",
353 "use_device_ptr",
354 "is_device_ptr",
355 "_cache_",
356 "gang",
357 "async",
358 "wait",
359 "auto",
360 "seq",
361 "_looptemp_",
362 "if",
363 "num_threads",
364 "schedule",
365 "nowait",
366 "ordered",
367 "default",
368 "collapse",
369 "untied",
370 "final",
371 "mergeable",
372 "device",
373 "dist_schedule",
374 "inbranch",
375 "notinbranch",
376 "num_teams",
377 "thread_limit",
378 "proc_bind",
379 "safelen",
380 "simdlen",
381 "for",
382 "parallel",
383 "sections",
384 "taskgroup",
385 "priority",
386 "grainsize",
387 "num_tasks",
388 "nogroup",
389 "threads",
390 "simd",
391 "hint",
392 "defaultmap",
393 "_simduid_",
394 "_Cilk_for_count_",
395 "independent",
396 "worker",
397 "vector",
398 "num_gangs",
399 "num_workers",
400 "vector_length",
401 "tile",
402 "_griddim_"
403 };
404
405
406 /* Return the tree node structure used by tree code CODE. */
407
408 static inline enum tree_node_structure_enum
409 tree_node_structure_for_code (enum tree_code code)
410 {
411 switch (TREE_CODE_CLASS (code))
412 {
413 case tcc_declaration:
414 {
415 switch (code)
416 {
417 case FIELD_DECL:
418 return TS_FIELD_DECL;
419 case PARM_DECL:
420 return TS_PARM_DECL;
421 case VAR_DECL:
422 return TS_VAR_DECL;
423 case LABEL_DECL:
424 return TS_LABEL_DECL;
425 case RESULT_DECL:
426 return TS_RESULT_DECL;
427 case DEBUG_EXPR_DECL:
428 return TS_DECL_WRTL;
429 case CONST_DECL:
430 return TS_CONST_DECL;
431 case TYPE_DECL:
432 return TS_TYPE_DECL;
433 case FUNCTION_DECL:
434 return TS_FUNCTION_DECL;
435 case TRANSLATION_UNIT_DECL:
436 return TS_TRANSLATION_UNIT_DECL;
437 default:
438 return TS_DECL_NON_COMMON;
439 }
440 }
441 case tcc_type:
442 return TS_TYPE_NON_COMMON;
443 case tcc_reference:
444 case tcc_comparison:
445 case tcc_unary:
446 case tcc_binary:
447 case tcc_expression:
448 case tcc_statement:
449 case tcc_vl_exp:
450 return TS_EXP;
451 default: /* tcc_constant and tcc_exceptional */
452 break;
453 }
454 switch (code)
455 {
456 /* tcc_constant cases. */
457 case VOID_CST: return TS_TYPED;
458 case INTEGER_CST: return TS_INT_CST;
459 case REAL_CST: return TS_REAL_CST;
460 case FIXED_CST: return TS_FIXED_CST;
461 case COMPLEX_CST: return TS_COMPLEX;
462 case VECTOR_CST: return TS_VECTOR;
463 case STRING_CST: return TS_STRING;
464 /* tcc_exceptional cases. */
465 case ERROR_MARK: return TS_COMMON;
466 case IDENTIFIER_NODE: return TS_IDENTIFIER;
467 case TREE_LIST: return TS_LIST;
468 case TREE_VEC: return TS_VEC;
469 case SSA_NAME: return TS_SSA_NAME;
470 case PLACEHOLDER_EXPR: return TS_COMMON;
471 case STATEMENT_LIST: return TS_STATEMENT_LIST;
472 case BLOCK: return TS_BLOCK;
473 case CONSTRUCTOR: return TS_CONSTRUCTOR;
474 case TREE_BINFO: return TS_BINFO;
475 case OMP_CLAUSE: return TS_OMP_CLAUSE;
476 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
477 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
478
479 default:
480 gcc_unreachable ();
481 }
482 }
483
484
485 /* Initialize tree_contains_struct to describe the hierarchy of tree
486 nodes. */
487
488 static void
489 initialize_tree_contains_struct (void)
490 {
491 unsigned i;
492
493 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
494 {
495 enum tree_code code;
496 enum tree_node_structure_enum ts_code;
497
498 code = (enum tree_code) i;
499 ts_code = tree_node_structure_for_code (code);
500
501 /* Mark the TS structure itself. */
502 tree_contains_struct[code][ts_code] = 1;
503
504 /* Mark all the structures that TS is derived from. */
505 switch (ts_code)
506 {
507 case TS_TYPED:
508 case TS_BLOCK:
509 MARK_TS_BASE (code);
510 break;
511
512 case TS_COMMON:
513 case TS_INT_CST:
514 case TS_REAL_CST:
515 case TS_FIXED_CST:
516 case TS_VECTOR:
517 case TS_STRING:
518 case TS_COMPLEX:
519 case TS_SSA_NAME:
520 case TS_CONSTRUCTOR:
521 case TS_EXP:
522 case TS_STATEMENT_LIST:
523 MARK_TS_TYPED (code);
524 break;
525
526 case TS_IDENTIFIER:
527 case TS_DECL_MINIMAL:
528 case TS_TYPE_COMMON:
529 case TS_LIST:
530 case TS_VEC:
531 case TS_BINFO:
532 case TS_OMP_CLAUSE:
533 case TS_OPTIMIZATION:
534 case TS_TARGET_OPTION:
535 MARK_TS_COMMON (code);
536 break;
537
538 case TS_TYPE_WITH_LANG_SPECIFIC:
539 MARK_TS_TYPE_COMMON (code);
540 break;
541
542 case TS_TYPE_NON_COMMON:
543 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
544 break;
545
546 case TS_DECL_COMMON:
547 MARK_TS_DECL_MINIMAL (code);
548 break;
549
550 case TS_DECL_WRTL:
551 case TS_CONST_DECL:
552 MARK_TS_DECL_COMMON (code);
553 break;
554
555 case TS_DECL_NON_COMMON:
556 MARK_TS_DECL_WITH_VIS (code);
557 break;
558
559 case TS_DECL_WITH_VIS:
560 case TS_PARM_DECL:
561 case TS_LABEL_DECL:
562 case TS_RESULT_DECL:
563 MARK_TS_DECL_WRTL (code);
564 break;
565
566 case TS_FIELD_DECL:
567 MARK_TS_DECL_COMMON (code);
568 break;
569
570 case TS_VAR_DECL:
571 MARK_TS_DECL_WITH_VIS (code);
572 break;
573
574 case TS_TYPE_DECL:
575 case TS_FUNCTION_DECL:
576 MARK_TS_DECL_NON_COMMON (code);
577 break;
578
579 case TS_TRANSLATION_UNIT_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 default:
584 gcc_unreachable ();
585 }
586 }
587
588 /* Basic consistency checks for attributes used in fold. */
589 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
590 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
592 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
601 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
618 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
622 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
624 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
625 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
629 }
630
631
632 /* Init tree.c. */
633
634 void
635 init_ttree (void)
636 {
637 /* Initialize the hash table of types. */
638 type_hash_table
639 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
640
641 debug_expr_for_decl
642 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
643
644 value_expr_for_decl
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
646
647 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
648
649 int_cst_node = make_int_cst (1, 1);
650
651 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
652
653 cl_optimization_node = make_node (OPTIMIZATION_NODE);
654 cl_target_option_node = make_node (TARGET_OPTION_NODE);
655
656 /* Initialize the tree_contains_struct array. */
657 initialize_tree_contains_struct ();
658 lang_hooks.init_ts ();
659 }
660
661 \f
662 /* The name of the object as the assembler will see it (but before any
663 translations made by ASM_OUTPUT_LABELREF). Often this is the same
664 as DECL_NAME. It is an IDENTIFIER_NODE. */
665 tree
666 decl_assembler_name (tree decl)
667 {
668 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
669 lang_hooks.set_decl_assembler_name (decl);
670 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
671 }
672
673 /* When the target supports COMDAT groups, this indicates which group the
674 DECL is associated with. This can be either an IDENTIFIER_NODE or a
675 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
676 tree
677 decl_comdat_group (const_tree node)
678 {
679 struct symtab_node *snode = symtab_node::get (node);
680 if (!snode)
681 return NULL;
682 return snode->get_comdat_group ();
683 }
684
685 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
686 tree
687 decl_comdat_group_id (const_tree node)
688 {
689 struct symtab_node *snode = symtab_node::get (node);
690 if (!snode)
691 return NULL;
692 return snode->get_comdat_group_id ();
693 }
694
695 /* When the target supports named sections, return the section name of
696 NODE as a string, or NULL if it is placed in no section. */
697 const char *
698 decl_section_name (const_tree node)
699 {
700 struct symtab_node *snode = symtab_node::get (node);
701 if (!snode)
702 return NULL;
703 return snode->get_section ();
704 }
705
706 /* Set the section name of NODE to VALUE, a NUL-terminated string;
707 a NULL VALUE removes any existing section association. */
708 void
709 set_decl_section_name (tree node, const char *value)
710 {
711 struct symtab_node *snode;
712
713 if (value == NULL)
714 {
715 snode = symtab_node::get (node);
716 if (!snode)
717 return;
718 }
719 else if (TREE_CODE (node) == VAR_DECL)
720 snode = varpool_node::get_create (node);
721 else
722 snode = cgraph_node::get_create (node);
723 snode->set_section (value);
724 }
725
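/* A minimal usage sketch of the two section helpers above (illustrative
   only; a real front end creates and registers the decl itself):

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("counter"), integer_type_node);
     TREE_STATIC (var) = 1;
     set_decl_section_name (var, ".mydata");
     const char *sec = decl_section_name (var);

   A NULL section name is a no-op for decls that never acquired a symtab
   node and clears the section on those that did.  */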
726 /* Return TLS model of a variable NODE. */
727 enum tls_model
728 decl_tls_model (const_tree node)
729 {
730 struct varpool_node *snode = varpool_node::get (node);
731 if (!snode)
732 return TLS_MODEL_NONE;
733 return snode->tls_model;
734 }
735
736 /* Set TLS model of variable NODE to MODEL. */
737 void
738 set_decl_tls_model (tree node, enum tls_model model)
739 {
740 struct varpool_node *vnode;
741
742 if (model == TLS_MODEL_NONE)
743 {
744 vnode = varpool_node::get (node);
745 if (!vnode)
746 return;
747 }
748 else
749 vnode = varpool_node::get_create (node);
750 vnode->tls_model = model;
751 }
752
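/* Likewise, a sketch of the TLS helpers, assuming VAR is a thread-local
   static VAR_DECL set up elsewhere (the name is illustrative):

     set_decl_tls_model (var, TLS_MODEL_INITIAL_EXEC);
     gcc_assert (decl_tls_model (var) == TLS_MODEL_INITIAL_EXEC);

   TLS_MODEL_NONE on a variable that is not yet in the varpool is simply
   ignored, mirroring set_decl_section_name above.  */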
753 /* Compute the number of bytes occupied by a tree with code CODE.
754 This function cannot be used for nodes that have variable sizes,
755 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
756 size_t
757 tree_code_size (enum tree_code code)
758 {
759 switch (TREE_CODE_CLASS (code))
760 {
761 case tcc_declaration: /* A decl node */
762 {
763 switch (code)
764 {
765 case FIELD_DECL:
766 return sizeof (struct tree_field_decl);
767 case PARM_DECL:
768 return sizeof (struct tree_parm_decl);
769 case VAR_DECL:
770 return sizeof (struct tree_var_decl);
771 case LABEL_DECL:
772 return sizeof (struct tree_label_decl);
773 case RESULT_DECL:
774 return sizeof (struct tree_result_decl);
775 case CONST_DECL:
776 return sizeof (struct tree_const_decl);
777 case TYPE_DECL:
778 return sizeof (struct tree_type_decl);
779 case FUNCTION_DECL:
780 return sizeof (struct tree_function_decl);
781 case DEBUG_EXPR_DECL:
782 return sizeof (struct tree_decl_with_rtl);
783 case TRANSLATION_UNIT_DECL:
784 return sizeof (struct tree_translation_unit_decl);
785 case NAMESPACE_DECL:
786 case IMPORTED_DECL:
787 case NAMELIST_DECL:
788 return sizeof (struct tree_decl_non_common);
789 default:
790 return lang_hooks.tree_size (code);
791 }
792 }
793
794 case tcc_type: /* a type node */
795 return sizeof (struct tree_type_non_common);
796
797 case tcc_reference: /* a reference */
798 case tcc_expression: /* an expression */
799 case tcc_statement: /* an expression with side effects */
800 case tcc_comparison: /* a comparison expression */
801 case tcc_unary: /* a unary arithmetic expression */
802 case tcc_binary: /* a binary arithmetic expression */
803 return (sizeof (struct tree_exp)
804 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
805
806 case tcc_constant: /* a constant */
807 switch (code)
808 {
809 case VOID_CST: return sizeof (struct tree_typed);
810 case INTEGER_CST: gcc_unreachable ();
811 case REAL_CST: return sizeof (struct tree_real_cst);
812 case FIXED_CST: return sizeof (struct tree_fixed_cst);
813 case COMPLEX_CST: return sizeof (struct tree_complex);
814 case VECTOR_CST: return sizeof (struct tree_vector);
815 case STRING_CST: gcc_unreachable ();
816 default:
817 return lang_hooks.tree_size (code);
818 }
819
820 case tcc_exceptional: /* something random, like an identifier. */
821 switch (code)
822 {
823 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
824 case TREE_LIST: return sizeof (struct tree_list);
825
826 case ERROR_MARK:
827 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
828
829 case TREE_VEC:
830 case OMP_CLAUSE: gcc_unreachable ();
831
832 case SSA_NAME: return sizeof (struct tree_ssa_name);
833
834 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
835 case BLOCK: return sizeof (struct tree_block);
836 case CONSTRUCTOR: return sizeof (struct tree_constructor);
837 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
838 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
839
840 default:
841 return lang_hooks.tree_size (code);
842 }
843
844 default:
845 gcc_unreachable ();
846 }
847 }
848
849 /* Compute the number of bytes occupied by NODE. This routine only
850 looks at TREE_CODE, except for those nodes that have variable sizes. */
851 size_t
852 tree_size (const_tree node)
853 {
854 const enum tree_code code = TREE_CODE (node);
855 switch (code)
856 {
857 case INTEGER_CST:
858 return (sizeof (struct tree_int_cst)
859 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
860
861 case TREE_BINFO:
862 return (offsetof (struct tree_binfo, base_binfos)
863 + vec<tree, va_gc>
864 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
865
866 case TREE_VEC:
867 return (sizeof (struct tree_vec)
868 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
869
870 case VECTOR_CST:
871 return (sizeof (struct tree_vector)
872 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
873
874 case STRING_CST:
875 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
876
877 case OMP_CLAUSE:
878 return (sizeof (struct tree_omp_clause)
879 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
880 * sizeof (tree));
881
882 default:
883 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
884 return (sizeof (struct tree_exp)
885 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
886 else
887 return tree_code_size (code);
888 }
889 }
890
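/* A sketch of how the two sizing routines differ for variable-sized
   codes (the values are illustrative):

     size_t fixed = tree_code_size (REAL_CST);
     tree v = make_tree_vec (4);
     size_t var = tree_size (v);

   The first is a compile-time constant, sizeof (struct tree_real_cst);
   the second depends on TREE_VEC_LENGTH, which is why tree_code_size
   hits gcc_unreachable for TREE_VEC and the other variable-sized
   codes.  */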
891 /* Record interesting allocation statistics for a tree node with CODE
892 and LENGTH. */
893
894 static void
895 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
896 size_t length ATTRIBUTE_UNUSED)
897 {
898 enum tree_code_class type = TREE_CODE_CLASS (code);
899 tree_node_kind kind;
900
901 if (!GATHER_STATISTICS)
902 return;
903
904 switch (type)
905 {
906 case tcc_declaration: /* A decl node */
907 kind = d_kind;
908 break;
909
910 case tcc_type: /* a type node */
911 kind = t_kind;
912 break;
913
914 case tcc_statement: /* an expression with side effects */
915 kind = s_kind;
916 break;
917
918 case tcc_reference: /* a reference */
919 kind = r_kind;
920 break;
921
922 case tcc_expression: /* an expression */
923 case tcc_comparison: /* a comparison expression */
924 case tcc_unary: /* a unary arithmetic expression */
925 case tcc_binary: /* a binary arithmetic expression */
926 kind = e_kind;
927 break;
928
929 case tcc_constant: /* a constant */
930 kind = c_kind;
931 break;
932
933 case tcc_exceptional: /* something random, like an identifier. */
934 switch (code)
935 {
936 case IDENTIFIER_NODE:
937 kind = id_kind;
938 break;
939
940 case TREE_VEC:
941 kind = vec_kind;
942 break;
943
944 case TREE_BINFO:
945 kind = binfo_kind;
946 break;
947
948 case SSA_NAME:
949 kind = ssa_name_kind;
950 break;
951
952 case BLOCK:
953 kind = b_kind;
954 break;
955
956 case CONSTRUCTOR:
957 kind = constr_kind;
958 break;
959
960 case OMP_CLAUSE:
961 kind = omp_clause_kind;
962 break;
963
964 default:
965 kind = x_kind;
966 break;
967 }
968 break;
969
970 case tcc_vl_exp:
971 kind = e_kind;
972 break;
973
974 default:
975 gcc_unreachable ();
976 }
977
978 tree_code_counts[(int) code]++;
979 tree_node_counts[(int) kind]++;
980 tree_node_sizes[(int) kind] += length;
981 }
982
983 /* Allocate and return a new UID from the DECL_UID namespace. */
984
985 int
986 allocate_decl_uid (void)
987 {
988 return next_decl_uid++;
989 }
990
991 /* Return a newly allocated node of code CODE. For decl and type
992 nodes, some other fields are initialized. The rest of the node is
993 initialized to zero. This function cannot be used for TREE_VEC,
994 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
995 tree_code_size.
996
997 Achoo! I got a code in the node. */
998
999 tree
1000 make_node_stat (enum tree_code code MEM_STAT_DECL)
1001 {
1002 tree t;
1003 enum tree_code_class type = TREE_CODE_CLASS (code);
1004 size_t length = tree_code_size (code);
1005
1006 record_node_allocation_statistics (code, length);
1007
1008 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1009 TREE_SET_CODE (t, code);
1010
1011 switch (type)
1012 {
1013 case tcc_statement:
1014 TREE_SIDE_EFFECTS (t) = 1;
1015 break;
1016
1017 case tcc_declaration:
1018 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1019 {
1020 if (code == FUNCTION_DECL)
1021 {
1022 SET_DECL_ALIGN (t, FUNCTION_BOUNDARY);
1023 DECL_MODE (t) = FUNCTION_MODE;
1024 }
1025 else
1026 SET_DECL_ALIGN (t, 1);
1027 }
1028 DECL_SOURCE_LOCATION (t) = input_location;
1029 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1030 DECL_UID (t) = --next_debug_decl_uid;
1031 else
1032 {
1033 DECL_UID (t) = allocate_decl_uid ();
1034 SET_DECL_PT_UID (t, -1);
1035 }
1036 if (TREE_CODE (t) == LABEL_DECL)
1037 LABEL_DECL_UID (t) = -1;
1038
1039 break;
1040
1041 case tcc_type:
1042 TYPE_UID (t) = next_type_uid++;
1043 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1044 TYPE_USER_ALIGN (t) = 0;
1045 TYPE_MAIN_VARIANT (t) = t;
1046 TYPE_CANONICAL (t) = t;
1047
1048 /* Default to no attributes for type, but let target change that. */
1049 TYPE_ATTRIBUTES (t) = NULL_TREE;
1050 targetm.set_default_type_attributes (t);
1051
1052 /* We have not yet computed the alias set for this type. */
1053 TYPE_ALIAS_SET (t) = -1;
1054 break;
1055
1056 case tcc_constant:
1057 TREE_CONSTANT (t) = 1;
1058 break;
1059
1060 case tcc_expression:
1061 switch (code)
1062 {
1063 case INIT_EXPR:
1064 case MODIFY_EXPR:
1065 case VA_ARG_EXPR:
1066 case PREDECREMENT_EXPR:
1067 case PREINCREMENT_EXPR:
1068 case POSTDECREMENT_EXPR:
1069 case POSTINCREMENT_EXPR:
1070 /* All of these have side-effects, no matter what their
1071 operands are. */
1072 TREE_SIDE_EFFECTS (t) = 1;
1073 break;
1074
1075 default:
1076 break;
1077 }
1078 break;
1079
1080 case tcc_exceptional:
1081 switch (code)
1082 {
1083 case TARGET_OPTION_NODE:
1084 TREE_TARGET_OPTION(t)
1085 = ggc_cleared_alloc<struct cl_target_option> ();
1086 break;
1087
1088 case OPTIMIZATION_NODE:
1089 TREE_OPTIMIZATION (t)
1090 = ggc_cleared_alloc<struct cl_optimization> ();
1091 break;
1092
1093 default:
1094 break;
1095 }
1096 break;
1097
1098 default:
1099 /* Other classes need no special treatment. */
1100 break;
1101 }
1102
1103 return t;
1104 }
1105
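/* A minimal sketch of make_node for a decl code; the identifier and
   context are illustrative, and a real front end fills in many more
   fields afterwards:

     tree lab = make_node (LABEL_DECL);
     DECL_NAME (lab) = get_identifier ("done");
     DECL_CONTEXT (lab) = current_function_decl;

   make_node has already assigned a fresh DECL_UID, recorded
   input_location as the source location and set LABEL_DECL_UID to -1,
   as the tcc_declaration case above shows.  */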
1106 /* Free tree node. */
1107
1108 void
1109 free_node (tree node)
1110 {
1111 enum tree_code code = TREE_CODE (node);
1112 if (GATHER_STATISTICS)
1113 {
1114 tree_code_counts[(int) TREE_CODE (node)]--;
1115 tree_node_counts[(int) t_kind]--;
1116 tree_node_sizes[(int) t_kind] -= tree_size (node);
1117 }
1118 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1119 vec_free (CONSTRUCTOR_ELTS (node));
1120 else if (code == BLOCK)
1121 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1122 else if (code == TREE_BINFO)
1123 vec_free (BINFO_BASE_ACCESSES (node));
1124 ggc_free (node);
1125 }
1126 \f
1127 /* Return a new node with the same contents as NODE except that its
1128 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1129
1130 tree
1131 copy_node_stat (tree node MEM_STAT_DECL)
1132 {
1133 tree t;
1134 enum tree_code code = TREE_CODE (node);
1135 size_t length;
1136
1137 gcc_assert (code != STATEMENT_LIST);
1138
1139 length = tree_size (node);
1140 record_node_allocation_statistics (code, length);
1141 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1142 memcpy (t, node, length);
1143
1144 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1145 TREE_CHAIN (t) = 0;
1146 TREE_ASM_WRITTEN (t) = 0;
1147 TREE_VISITED (t) = 0;
1148
1149 if (TREE_CODE_CLASS (code) == tcc_declaration)
1150 {
1151 if (code == DEBUG_EXPR_DECL)
1152 DECL_UID (t) = --next_debug_decl_uid;
1153 else
1154 {
1155 DECL_UID (t) = allocate_decl_uid ();
1156 if (DECL_PT_UID_SET_P (node))
1157 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1158 }
1159 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1160 && DECL_HAS_VALUE_EXPR_P (node))
1161 {
1162 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1163 DECL_HAS_VALUE_EXPR_P (t) = 1;
1164 }
1165 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1166 if (TREE_CODE (node) == VAR_DECL)
1167 {
1168 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1169 t->decl_with_vis.symtab_node = NULL;
1170 }
1171 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1172 {
1173 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1174 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1175 }
1176 if (TREE_CODE (node) == FUNCTION_DECL)
1177 {
1178 DECL_STRUCT_FUNCTION (t) = NULL;
1179 t->decl_with_vis.symtab_node = NULL;
1180 }
1181 }
1182 else if (TREE_CODE_CLASS (code) == tcc_type)
1183 {
1184 TYPE_UID (t) = next_type_uid++;
1185 /* The following is so that the debug code for
1186 the copy is different from the original type.
1187 The two statements usually duplicate each other
1188 (because they clear fields of the same union),
1189 but the optimizer should catch that. */
1190 TYPE_SYMTAB_POINTER (t) = 0;
1191 TYPE_SYMTAB_ADDRESS (t) = 0;
1192
1193 /* Do not copy the values cache. */
1194 if (TYPE_CACHED_VALUES_P (t))
1195 {
1196 TYPE_CACHED_VALUES_P (t) = 0;
1197 TYPE_CACHED_VALUES (t) = NULL_TREE;
1198 }
1199 }
1200 else if (code == TARGET_OPTION_NODE)
1201 {
1202 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1203 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1204 sizeof (struct cl_target_option));
1205 }
1206 else if (code == OPTIMIZATION_NODE)
1207 {
1208 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1209 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1210 sizeof (struct cl_optimization));
1211 }
1212
1213 return t;
1214 }
1215
1216 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1217 For example, this can copy a list made of TREE_LIST nodes. */
1218
1219 tree
1220 copy_list (tree list)
1221 {
1222 tree head;
1223 tree prev, next;
1224
1225 if (list == 0)
1226 return 0;
1227
1228 head = prev = copy_node (list);
1229 next = TREE_CHAIN (list);
1230 while (next)
1231 {
1232 TREE_CHAIN (prev) = copy_node (next);
1233 prev = TREE_CHAIN (prev);
1234 next = TREE_CHAIN (next);
1235 }
1236 return head;
1237 }
1238
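/* A sketch of copy_list on a TREE_LIST chain; the two-element argument
   list here is illustrative:

     tree args = tree_cons (NULL_TREE, size_int (4),
                            build_tree_list (NULL_TREE, size_int (8)));
     tree dup = copy_list (args);

   DUP is a fresh two-node chain; its TREE_PURPOSE and TREE_VALUE fields
   still point at the same trees as the originals, since only the list
   nodes themselves are copied.  */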
1239 \f
1240 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1241 INTEGER_CST with value CST and type TYPE. */
1242
1243 static unsigned int
1244 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1245 {
1246 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1247 /* We need extra HWIs if CST is an unsigned integer with its
1248 upper bit set. */
1249 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1250 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1251 return cst.get_len ();
1252 }
1253
1254 /* Return a new INTEGER_CST with value CST and type TYPE. */
1255
1256 static tree
1257 build_new_int_cst (tree type, const wide_int &cst)
1258 {
1259 unsigned int len = cst.get_len ();
1260 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1261 tree nt = make_int_cst (len, ext_len);
1262
1263 if (len < ext_len)
1264 {
1265 --ext_len;
1266 TREE_INT_CST_ELT (nt, ext_len)
1267 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1268 for (unsigned int i = len; i < ext_len; ++i)
1269 TREE_INT_CST_ELT (nt, i) = -1;
1270 }
1271 else if (TYPE_UNSIGNED (type)
1272 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1273 {
1274 len--;
1275 TREE_INT_CST_ELT (nt, len)
1276 = zext_hwi (cst.elt (len),
1277 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1278 }
1279
1280 for (unsigned int i = 0; i < len; i++)
1281 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1282 TREE_TYPE (nt) = type;
1283 return nt;
1284 }
1285
1286 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1287
1288 tree
1289 build_int_cst (tree type, HOST_WIDE_INT low)
1290 {
1291 /* Support legacy code. */
1292 if (!type)
1293 type = integer_type_node;
1294
1295 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1296 }
1297
1298 tree
1299 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1300 {
1301 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1302 }
1303
1304 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1305
1306 tree
1307 build_int_cst_type (tree type, HOST_WIDE_INT low)
1308 {
1309 gcc_assert (type);
1310 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1311 }
1312
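/* A sketch of the INTEGER_CST builders above; all of them funnel into
   wide_int_to_tree, so equal values of the same type share one node:

     tree a = build_int_cst (integer_type_node, -1);
     tree b = build_int_cstu (size_type_node, 7);
     tree c = build_int_cst_type (char_type_node, 'x');

   They differ only in how the HOST_WIDE_INT argument is extended to the
   type's precision: sign extension for build_int_cst and
   build_int_cst_type, zero extension for build_int_cstu.  */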
1313 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1314 of CST is assumed to be the same as the signedness of TYPE. */
1315
1316 tree
1317 double_int_to_tree (tree type, double_int cst)
1318 {
1319 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1320 }
1321
1322 /* Force the wide_int CST into the range of the type TYPE by sign or
1323 zero extending it. OVERFLOWABLE indicates whether we are interested
1324 in overflow of the value: when >0 we only care about signed
1325 overflow, when <0 we care about any overflow. OVERFLOWED
1326 indicates whether overflow has already occurred. The value is
1327 forced into the range of TYPE by sign or zero extending all the
1328 bits outside the type's precision.
1329 We set TREE_OVERFLOW on the result if
1330 OVERFLOWED is nonzero,
1331 or OVERFLOWABLE is >0 and signed overflow occurs,
1332 or OVERFLOWABLE is <0 and any overflow occurs.
1333 We return a new tree node for the extended wide_int. The node
1334 is shared if no overflow flags are set. */
1335
1336
1337 tree
1338 force_fit_type (tree type, const wide_int_ref &cst,
1339 int overflowable, bool overflowed)
1340 {
1341 signop sign = TYPE_SIGN (type);
1342
1343 /* If we need to set overflow flags, return a new unshared node. */
1344 if (overflowed || !wi::fits_to_tree_p (cst, type))
1345 {
1346 if (overflowed
1347 || overflowable < 0
1348 || (overflowable > 0 && sign == SIGNED))
1349 {
1350 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1351 tree t = build_new_int_cst (type, tmp);
1352 TREE_OVERFLOW (t) = 1;
1353 return t;
1354 }
1355 }
1356
1357 /* Else build a shared node. */
1358 return wide_int_to_tree (type, cst);
1359 }
1360
1361 /* These are the hash table functions for the hash table of INTEGER_CST
1362 nodes of a sizetype. */
1363
1364 /* Return the hash code for X, an INTEGER_CST. */
1365
1366 hashval_t
1367 int_cst_hasher::hash (tree x)
1368 {
1369 const_tree const t = x;
1370 hashval_t code = TYPE_UID (TREE_TYPE (t));
1371 int i;
1372
1373 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1374 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1375
1376 return code;
1377 }
1378
1379 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1380 is the same as the value represented by Y. */
1381
1382 bool
1383 int_cst_hasher::equal (tree x, tree y)
1384 {
1385 const_tree const xt = x;
1386 const_tree const yt = y;
1387
1388 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1389 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1390 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1391 return false;
1392
1393 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1394 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1395 return false;
1396
1397 return true;
1398 }
1399
1400 /* Create an INT_CST node of TYPE and value CST.
1401 The returned node is always shared. For small integers we use a
1402 per-type vector cache, for larger ones we use a single hash table.
1403 The value is extended from its precision according to the sign of
1404 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1405 the upper bits and ensures that hashing and value equality based
1406 upon the underlying HOST_WIDE_INTs works without masking. */
1407
1408 tree
1409 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1410 {
1411 tree t;
1412 int ix = -1;
1413 int limit = 0;
1414
1415 gcc_assert (type);
1416 unsigned int prec = TYPE_PRECISION (type);
1417 signop sgn = TYPE_SIGN (type);
1418
1419 /* Verify that everything is canonical. */
1420 int l = pcst.get_len ();
1421 if (l > 1)
1422 {
1423 if (pcst.elt (l - 1) == 0)
1424 gcc_checking_assert (pcst.elt (l - 2) < 0);
1425 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1426 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1427 }
1428
1429 wide_int cst = wide_int::from (pcst, prec, sgn);
1430 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1431
1432 if (ext_len == 1)
1433 {
1434 /* We just need to store a single HOST_WIDE_INT. */
1435 HOST_WIDE_INT hwi;
1436 if (TYPE_UNSIGNED (type))
1437 hwi = cst.to_uhwi ();
1438 else
1439 hwi = cst.to_shwi ();
1440
1441 switch (TREE_CODE (type))
1442 {
1443 case NULLPTR_TYPE:
1444 gcc_assert (hwi == 0);
1445 /* Fallthru. */
1446
1447 case POINTER_TYPE:
1448 case REFERENCE_TYPE:
1449 case POINTER_BOUNDS_TYPE:
1450 /* Cache NULL pointer and zero bounds. */
1451 if (hwi == 0)
1452 {
1453 limit = 1;
1454 ix = 0;
1455 }
1456 break;
1457
1458 case BOOLEAN_TYPE:
1459 /* Cache false or true. */
1460 limit = 2;
1461 if (IN_RANGE (hwi, 0, 1))
1462 ix = hwi;
1463 break;
1464
1465 case INTEGER_TYPE:
1466 case OFFSET_TYPE:
1467 if (TYPE_SIGN (type) == UNSIGNED)
1468 {
1469 /* Cache [0, N). */
1470 limit = INTEGER_SHARE_LIMIT;
1471 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1472 ix = hwi;
1473 }
1474 else
1475 {
1476 /* Cache [-1, N). */
1477 limit = INTEGER_SHARE_LIMIT + 1;
1478 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1479 ix = hwi + 1;
1480 }
1481 break;
1482
1483 case ENUMERAL_TYPE:
1484 break;
1485
1486 default:
1487 gcc_unreachable ();
1488 }
1489
1490 if (ix >= 0)
1491 {
1492 /* Look for it in the type's vector of small shared ints. */
1493 if (!TYPE_CACHED_VALUES_P (type))
1494 {
1495 TYPE_CACHED_VALUES_P (type) = 1;
1496 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1497 }
1498
1499 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1500 if (t)
1501 /* Make sure no one is clobbering the shared constant. */
1502 gcc_checking_assert (TREE_TYPE (t) == type
1503 && TREE_INT_CST_NUNITS (t) == 1
1504 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1505 && TREE_INT_CST_EXT_NUNITS (t) == 1
1506 && TREE_INT_CST_ELT (t, 0) == hwi);
1507 else
1508 {
1509 /* Create a new shared int. */
1510 t = build_new_int_cst (type, cst);
1511 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1512 }
1513 }
1514 else
1515 {
1516 /* Use the cache of larger shared ints, using int_cst_node as
1517 a temporary. */
1518
1519 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1520 TREE_TYPE (int_cst_node) = type;
1521
1522 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1523 t = *slot;
1524 if (!t)
1525 {
1526 /* Insert this one into the hash table. */
1527 t = int_cst_node;
1528 *slot = t;
1529 /* Make a new node for next time round. */
1530 int_cst_node = make_int_cst (1, 1);
1531 }
1532 }
1533 }
1534 else
1535 {
1536 /* The value either hashes properly or we drop it on the floor
1537 for the gc to take care of. There will not be enough of them
1538 to worry about. */
1539
1540 tree nt = build_new_int_cst (type, cst);
1541 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1542 t = *slot;
1543 if (!t)
1544 {
1545 /* Insert this one into the hash table. */
1546 t = nt;
1547 *slot = t;
1548 }
1549 }
1550
1551 return t;
1552 }
1553
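/* A sketch of calling wide_int_to_tree directly; wi::shwi builds a
   wide_int of the requested precision, which must match the type:

     unsigned int prec = TYPE_PRECISION (sizetype);
     tree sixteen = wide_int_to_tree (sizetype, wi::shwi (16, prec));

   Small values such as this come back from the per-type cache, so the
   result is typically pointer-equal to size_int (16).  */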
1554 void
1555 cache_integer_cst (tree t)
1556 {
1557 tree type = TREE_TYPE (t);
1558 int ix = -1;
1559 int limit = 0;
1560 int prec = TYPE_PRECISION (type);
1561
1562 gcc_assert (!TREE_OVERFLOW (t));
1563
1564 switch (TREE_CODE (type))
1565 {
1566 case NULLPTR_TYPE:
1567 gcc_assert (integer_zerop (t));
1568 /* Fallthru. */
1569
1570 case POINTER_TYPE:
1571 case REFERENCE_TYPE:
1572 /* Cache NULL pointer. */
1573 if (integer_zerop (t))
1574 {
1575 limit = 1;
1576 ix = 0;
1577 }
1578 break;
1579
1580 case BOOLEAN_TYPE:
1581 /* Cache false or true. */
1582 limit = 2;
1583 if (wi::ltu_p (t, 2))
1584 ix = TREE_INT_CST_ELT (t, 0);
1585 break;
1586
1587 case INTEGER_TYPE:
1588 case OFFSET_TYPE:
1589 if (TYPE_UNSIGNED (type))
1590 {
1591 /* Cache 0..N */
1592 limit = INTEGER_SHARE_LIMIT;
1593
1594 /* This is a little hokey, but if the prec is smaller than
1595 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1596 obvious test will not get the correct answer. */
1597 if (prec < HOST_BITS_PER_WIDE_INT)
1598 {
1599 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1600 ix = tree_to_uhwi (t);
1601 }
1602 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1603 ix = tree_to_uhwi (t);
1604 }
1605 else
1606 {
1607 /* Cache -1..N */
1608 limit = INTEGER_SHARE_LIMIT + 1;
1609
1610 if (integer_minus_onep (t))
1611 ix = 0;
1612 else if (!wi::neg_p (t))
1613 {
1614 if (prec < HOST_BITS_PER_WIDE_INT)
1615 {
1616 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1617 ix = tree_to_shwi (t) + 1;
1618 }
1619 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1620 ix = tree_to_shwi (t) + 1;
1621 }
1622 }
1623 break;
1624
1625 case ENUMERAL_TYPE:
1626 break;
1627
1628 default:
1629 gcc_unreachable ();
1630 }
1631
1632 if (ix >= 0)
1633 {
1634 /* Look for it in the type's vector of small shared ints. */
1635 if (!TYPE_CACHED_VALUES_P (type))
1636 {
1637 TYPE_CACHED_VALUES_P (type) = 1;
1638 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1639 }
1640
1641 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1642 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1643 }
1644 else
1645 {
1646 /* Use the cache of larger shared ints. */
1647 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1648 /* If there is already an entry for the number verify it's the
1649 same. */
1650 if (*slot)
1651 gcc_assert (wi::eq_p (tree (*slot), t));
1652 else
1653 /* Otherwise insert this one into the hash table. */
1654 *slot = t;
1655 }
1656 }
1657
1658
1659 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1660 and the rest are zeros. */
1661
1662 tree
1663 build_low_bits_mask (tree type, unsigned bits)
1664 {
1665 gcc_assert (bits <= TYPE_PRECISION (type));
1666
1667 return wide_int_to_tree (type, wi::mask (bits, false,
1668 TYPE_PRECISION (type)));
1669 }
1670
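/* For instance, a byte mask in an unsigned type (a sketch):

     tree mask = build_low_bits_mask (unsigned_type_node, 8);

   yields the shared INTEGER_CST 0xff, the same node that
   build_int_cstu (unsigned_type_node, 0xff) would return.  */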
1671 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1672 HOST_WIDE_INT without loss of precision. */
1673
1674 bool
1675 cst_and_fits_in_hwi (const_tree x)
1676 {
1677 return (TREE_CODE (x) == INTEGER_CST
1678 && TYPE_PRECISION (TREE_TYPE (x)) <= HOST_BITS_PER_WIDE_INT);
1679 }
1680
1681 /* Build a newly constructed VECTOR_CST node of length LEN. */
1682
1683 tree
1684 make_vector_stat (unsigned len MEM_STAT_DECL)
1685 {
1686 tree t;
1687 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1688
1689 record_node_allocation_statistics (VECTOR_CST, length);
1690
1691 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1692
1693 TREE_SET_CODE (t, VECTOR_CST);
1694 TREE_CONSTANT (t) = 1;
1695
1696 return t;
1697 }
1698
1699 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1700 are in a list pointed to by VALS. */
1701
1702 tree
1703 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1704 {
1705 int over = 0;
1706 unsigned cnt = 0;
1707 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1708 TREE_TYPE (v) = type;
1709
1710 /* Iterate through elements and check for overflow. */
1711 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1712 {
1713 tree value = vals[cnt];
1714
1715 VECTOR_CST_ELT (v, cnt) = value;
1716
1717 /* Don't crash if we get an address constant. */
1718 if (!CONSTANT_CLASS_P (value))
1719 continue;
1720
1721 over |= TREE_OVERFLOW (value);
1722 }
1723
1724 TREE_OVERFLOW (v) = over;
1725 return v;
1726 }
1727
1728 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1729 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1730
1731 tree
1732 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1733 {
1734 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1735 unsigned HOST_WIDE_INT idx, pos = 0;
1736 tree value;
1737
1738 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1739 {
1740 if (TREE_CODE (value) == VECTOR_CST)
1741 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1742 vec[pos++] = VECTOR_CST_ELT (value, i);
1743 else
1744 vec[pos++] = value;
1745 }
1746 while (pos < TYPE_VECTOR_SUBPARTS (type))
1747 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1748
1749 return build_vector (type, vec);
1750 }
1751
1752 /* Build a vector of type VECTYPE where all the elements are SCs. */
1753 tree
1754 build_vector_from_val (tree vectype, tree sc)
1755 {
1756 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1757
1758 if (sc == error_mark_node)
1759 return sc;
1760
1761 /* Verify that the vector type is suitable for SC. Note that there
1762 is some inconsistency in the type-system with respect to restrict
1763 qualifications of pointers. Vector types always have a main-variant
1764 element type and the qualification is applied to the vector-type.
1765 So TREE_TYPE (vector-type) does not return a properly qualified
1766 vector element-type. */
1767 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1768 TREE_TYPE (vectype)));
1769
1770 if (CONSTANT_CLASS_P (sc))
1771 {
1772 tree *v = XALLOCAVEC (tree, nunits);
1773 for (i = 0; i < nunits; ++i)
1774 v[i] = sc;
1775 return build_vector (vectype, v);
1776 }
1777 else
1778 {
1779 vec<constructor_elt, va_gc> *v;
1780 vec_alloc (v, nunits);
1781 for (i = 0; i < nunits; ++i)
1782 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1783 return build_constructor (vectype, v);
1784 }
1785 }
1786
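/* A sketch of splatting a scalar across a vector type; the four-element
   integer vector is illustrative:

     tree v4si = build_vector_type (integer_type_node, 4);
     tree four = build_int_cst (integer_type_node, 4);
     tree splat = build_vector_from_val (v4si, four);

   Because FOUR is a constant the result is a VECTOR_CST; a non-constant
   SC would instead yield a CONSTRUCTOR, as the branch above shows.  */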
1787 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1788 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1789
1790 void
1791 recompute_constructor_flags (tree c)
1792 {
1793 unsigned int i;
1794 tree val;
1795 bool constant_p = true;
1796 bool side_effects_p = false;
1797 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1798
1799 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1800 {
1801 /* Mostly ctors will have elts that don't have side-effects, so
1802 the usual case is to scan all the elements. Hence a single
1803 loop for both const and side effects, rather than one loop
1804 each (with early outs). */
1805 if (!TREE_CONSTANT (val))
1806 constant_p = false;
1807 if (TREE_SIDE_EFFECTS (val))
1808 side_effects_p = true;
1809 }
1810
1811 TREE_SIDE_EFFECTS (c) = side_effects_p;
1812 TREE_CONSTANT (c) = constant_p;
1813 }
1814
1815 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1816 CONSTRUCTOR C. */
1817
1818 void
1819 verify_constructor_flags (tree c)
1820 {
1821 unsigned int i;
1822 tree val;
1823 bool constant_p = TREE_CONSTANT (c);
1824 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1825 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1826
1827 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1828 {
1829 if (constant_p && !TREE_CONSTANT (val))
1830 internal_error ("non-constant element in constant CONSTRUCTOR");
1831 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1832 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1833 }
1834 }
1835
1836 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1837 are in the vec pointed to by VALS. */
1838 tree
1839 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1840 {
1841 tree c = make_node (CONSTRUCTOR);
1842
1843 TREE_TYPE (c) = type;
1844 CONSTRUCTOR_ELTS (c) = vals;
1845
1846 recompute_constructor_flags (c);
1847
1848 return c;
1849 }
1850
1851 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1852 INDEX and VALUE. */
1853 tree
1854 build_constructor_single (tree type, tree index, tree value)
1855 {
1856 vec<constructor_elt, va_gc> *v;
1857 constructor_elt elt = {index, value};
1858
1859 vec_alloc (v, 1);
1860 v->quick_push (elt);
1861
1862 return build_constructor (type, v);
1863 }
1864
1865
1866 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1867 are in a list pointed to by VALS. */
1868 tree
1869 build_constructor_from_list (tree type, tree vals)
1870 {
1871 tree t;
1872 vec<constructor_elt, va_gc> *v = NULL;
1873
1874 if (vals)
1875 {
1876 vec_alloc (v, list_length (vals));
1877 for (t = vals; t; t = TREE_CHAIN (t))
1878 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1879 }
1880
1881 return build_constructor (type, v);
1882 }
1883
1884 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1885 of elements, provided as index/value pairs. */
1886
1887 tree
1888 build_constructor_va (tree type, int nelts, ...)
1889 {
1890 vec<constructor_elt, va_gc> *v = NULL;
1891 va_list p;
1892
1893 va_start (p, nelts);
1894 vec_alloc (v, nelts);
1895 while (nelts--)
1896 {
1897 tree index = va_arg (p, tree);
1898 tree value = va_arg (p, tree);
1899 CONSTRUCTOR_APPEND_ELT (v, index, value);
1900 }
1901 va_end (p);
1902 return build_constructor (type, v);
1903 }
1904
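/* A sketch of build_constructor_va for a two-element array initializer;
   the element values are illustrative:

     tree itype = build_index_type (size_int (1));
     tree atype = build_array_type (integer_type_node, itype);
     tree ctor = build_constructor_va (atype, 2,
                                       size_int (0), integer_zero_node,
                                       size_int (1), integer_one_node);

   The result is marked TREE_CONSTANT because every element is.  */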
1905 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1906
1907 tree
1908 build_fixed (tree type, FIXED_VALUE_TYPE f)
1909 {
1910 tree v;
1911 FIXED_VALUE_TYPE *fp;
1912
1913 v = make_node (FIXED_CST);
1914 fp = ggc_alloc<fixed_value> ();
1915 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1916
1917 TREE_TYPE (v) = type;
1918 TREE_FIXED_CST_PTR (v) = fp;
1919 return v;
1920 }
1921
1922 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1923
1924 tree
1925 build_real (tree type, REAL_VALUE_TYPE d)
1926 {
1927 tree v;
1928 REAL_VALUE_TYPE *dp;
1929 int overflow = 0;
1930
1931 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1932 Consider doing it via real_convert now. */
1933
1934 v = make_node (REAL_CST);
1935 dp = ggc_alloc<real_value> ();
1936 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1937
1938 TREE_TYPE (v) = type;
1939 TREE_REAL_CST_PTR (v) = dp;
1940 TREE_OVERFLOW (v) = overflow;
1941 return v;
1942 }
1943
1944 /* Like build_real, but first truncate D to the type. */
1945
1946 tree
1947 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1948 {
1949 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1950 }
1951
1952 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1953 node I, converted according to the floating-point type TYPE. */
1954
1955 REAL_VALUE_TYPE
1956 real_value_from_int_cst (const_tree type, const_tree i)
1957 {
1958 REAL_VALUE_TYPE d;
1959
1960 /* Clear all bits of the real value type so that we can later do
1961 bitwise comparisons to see if two values are the same. */
1962 memset (&d, 0, sizeof d);
1963
1964 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1965 TYPE_SIGN (TREE_TYPE (i)));
1966 return d;
1967 }
1968
1969 /* Given a tree representing an integer constant I, return a tree
1970 representing the same value as a floating-point constant of type TYPE. */
1971
1972 tree
1973 build_real_from_int_cst (tree type, const_tree i)
1974 {
1975 tree v;
1976 int overflow = TREE_OVERFLOW (i);
1977
1978 v = build_real (type, real_value_from_int_cst (type, i));
1979
1980 TREE_OVERFLOW (v) |= overflow;
1981 return v;
1982 }
1983
1984 /* Return a newly constructed STRING_CST node whose value is
1985 the LEN characters at STR.
1986 Note that for a C string literal, LEN should include the trailing NUL.
1987 The TREE_TYPE is not initialized. */
1988
1989 tree
1990 build_string (int len, const char *str)
1991 {
1992 tree s;
1993 size_t length;
1994
1995 /* Do not waste bytes provided by padding of struct tree_string. */
1996 length = len + offsetof (struct tree_string, str) + 1;
1997
1998 record_node_allocation_statistics (STRING_CST, length);
1999
2000 s = (tree) ggc_internal_alloc (length);
2001
2002 memset (s, 0, sizeof (struct tree_typed));
2003 TREE_SET_CODE (s, STRING_CST);
2004 TREE_CONSTANT (s) = 1;
2005 TREE_STRING_LENGTH (s) = len;
2006 memcpy (s->string.str, str, len);
2007 s->string.str[len] = '\0';
2008
2009 return s;
2010 }
2011
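/* A sketch of build_string for a C-style literal; the length includes
   the trailing NUL and the caller supplies the type afterwards:

     tree s = build_string (sizeof "hi", "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));

   Here the index type 0..2 matches the three stored characters,
   including the terminating NUL.  */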
2012 /* Return a newly constructed COMPLEX_CST node whose value is
2013 specified by the real and imaginary parts REAL and IMAG.
2014 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2015 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2016
2017 tree
2018 build_complex (tree type, tree real, tree imag)
2019 {
2020 tree t = make_node (COMPLEX_CST);
2021
2022 TREE_REALPART (t) = real;
2023 TREE_IMAGPART (t) = imag;
2024 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2025 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2026 return t;
2027 }
2028
2029 /* Build a complex (inf +- 0i), such as for the result of cproj.
2030 TYPE is the complex tree type of the result. If NEG is true, the
2031 imaginary zero is negative. */
2032
2033 tree
2034 build_complex_inf (tree type, bool neg)
2035 {
2036 REAL_VALUE_TYPE rinf, rzero = dconst0;
2037
2038 real_inf (&rinf);
2039 rzero.sign = neg;
2040 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2041 build_real (TREE_TYPE (type), rzero));
2042 }
2043
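/* Illustration (a minimal sketch using the standard double nodes): the
   constant 1.0 + 0.0i and the cproj-style infinity +Inf + 0.0i could be
   built as

     tree one = build_complex (complex_double_type_node,
                               build_real (double_type_node, dconst1),
                               build_real (double_type_node, dconst0));
     tree inf = build_complex_inf (complex_double_type_node, false);

   The element type passed to build_real must match the TREE_TYPE of the
   complex type.  */
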
2044 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2045 element is set to 1. In particular, this is 1 + i for complex types. */
2046
2047 tree
2048 build_each_one_cst (tree type)
2049 {
2050 if (TREE_CODE (type) == COMPLEX_TYPE)
2051 {
2052 tree scalar = build_one_cst (TREE_TYPE (type));
2053 return build_complex (type, scalar, scalar);
2054 }
2055 else
2056 return build_one_cst (type);
2057 }
2058
2059 /* Return a constant of arithmetic type TYPE which is the
2060 multiplicative identity of the set TYPE. */
2061
2062 tree
2063 build_one_cst (tree type)
2064 {
2065 switch (TREE_CODE (type))
2066 {
2067 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2068 case POINTER_TYPE: case REFERENCE_TYPE:
2069 case OFFSET_TYPE:
2070 return build_int_cst (type, 1);
2071
2072 case REAL_TYPE:
2073 return build_real (type, dconst1);
2074
2075 case FIXED_POINT_TYPE:
2076 /* We can only generate 1 for accum types. */
2077 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2078 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_one_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 return build_complex (type,
2089 build_one_cst (TREE_TYPE (type)),
2090 build_zero_cst (TREE_TYPE (type)));
2091
2092 default:
2093 gcc_unreachable ();
2094 }
2095 }
2096
2097 /* Return an integer of type TYPE containing all 1's in as much precision as
2098 it contains, or a complex or vector whose subparts are such integers. */
2099
2100 tree
2101 build_all_ones_cst (tree type)
2102 {
2103 if (TREE_CODE (type) == COMPLEX_TYPE)
2104 {
2105 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2106 return build_complex (type, scalar, scalar);
2107 }
2108 else
2109 return build_minus_one_cst (type);
2110 }
2111
2112 /* Return a constant of arithmetic type TYPE which is the
2113 opposite of the multiplicative identity of the set TYPE. */
2114
2115 tree
2116 build_minus_one_cst (tree type)
2117 {
2118 switch (TREE_CODE (type))
2119 {
2120 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2121 case POINTER_TYPE: case REFERENCE_TYPE:
2122 case OFFSET_TYPE:
2123 return build_int_cst (type, -1);
2124
2125 case REAL_TYPE:
2126 return build_real (type, dconstm1);
2127
2128 case FIXED_POINT_TYPE:
2129 /* We can only generate -1 for accum types. */
2130 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2131 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2132 TYPE_MODE (type)));
2133
2134 case VECTOR_TYPE:
2135 {
2136 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2137
2138 return build_vector_from_val (type, scalar);
2139 }
2140
2141 case COMPLEX_TYPE:
2142 return build_complex (type,
2143 build_minus_one_cst (TREE_TYPE (type)),
2144 build_zero_cst (TREE_TYPE (type)));
2145
2146 default:
2147 gcc_unreachable ();
2148 }
2149 }
2150
2151 /* Build 0 constant of type TYPE. This is used by constructor folding
2152 and thus the constant should be represented in memory by
2153 zero(es). */
2154
2155 tree
2156 build_zero_cst (tree type)
2157 {
2158 switch (TREE_CODE (type))
2159 {
2160 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2161 case POINTER_TYPE: case REFERENCE_TYPE:
2162 case OFFSET_TYPE: case NULLPTR_TYPE:
2163 return build_int_cst (type, 0);
2164
2165 case REAL_TYPE:
2166 return build_real (type, dconst0);
2167
2168 case FIXED_POINT_TYPE:
2169 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2170
2171 case VECTOR_TYPE:
2172 {
2173 tree scalar = build_zero_cst (TREE_TYPE (type));
2174
2175 return build_vector_from_val (type, scalar);
2176 }
2177
2178 case COMPLEX_TYPE:
2179 {
2180 tree zero = build_zero_cst (TREE_TYPE (type));
2181
2182 return build_complex (type, zero, zero);
2183 }
2184
2185 default:
2186 if (!AGGREGATE_TYPE_P (type))
2187 return fold_convert (type, integer_zero_node);
2188 return build_constructor (type, NULL);
2189 }
2190 }
2191
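/* For instance (sketch only), the scalar and composite cases line up as
   follows:

     tree z  = build_zero_cst (double_type_node);
     tree i1 = build_one_cst (integer_type_node);
     tree v1 = build_one_cst (build_vector_type (integer_type_node, 4));

   Z is a REAL_CST of 0.0, I1 is the INTEGER_CST 1 and V1 is a vector
   constant whose four elements are each the INTEGER_CST 1.  */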
2192
2193 /* Build a BINFO with LEN language slots. */
2194
2195 tree
2196 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2197 {
2198 tree t;
2199 size_t length = (offsetof (struct tree_binfo, base_binfos)
2200 + vec<tree, va_gc>::embedded_size (base_binfos));
2201
2202 record_node_allocation_statistics (TREE_BINFO, length);
2203
2204 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2205
2206 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2207
2208 TREE_SET_CODE (t, TREE_BINFO);
2209
2210 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2211
2212 return t;
2213 }
2214
2215 /* Create a CASE_LABEL_EXPR tree node and return it. */
2216
2217 tree
2218 build_case_label (tree low_value, tree high_value, tree label_decl)
2219 {
2220 tree t = make_node (CASE_LABEL_EXPR);
2221
2222 TREE_TYPE (t) = void_type_node;
2223 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2224
2225 CASE_LOW (t) = low_value;
2226 CASE_HIGH (t) = high_value;
2227 CASE_LABEL (t) = label_decl;
2228 CASE_CHAIN (t) = NULL_TREE;
2229
2230 return t;
2231 }
2232
2233 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2234 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2235 The latter determines the length of the HOST_WIDE_INT vector. */
2236
2237 tree
2238 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2239 {
2240 tree t;
2241 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2242 + sizeof (struct tree_int_cst));
2243
2244 gcc_assert (len);
2245 record_node_allocation_statistics (INTEGER_CST, length);
2246
2247 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2248
2249 TREE_SET_CODE (t, INTEGER_CST);
2250 TREE_INT_CST_NUNITS (t) = len;
2251 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2252 /* to_offset can only be applied to trees that are offset_int-sized
2253 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2254 must be exactly the precision of offset_int and so LEN is correct. */
2255 if (ext_len <= OFFSET_INT_ELTS)
2256 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2257 else
2258 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2259
2260 TREE_CONSTANT (t) = 1;
2261
2262 return t;
2263 }
2264
2265 /* Build a newly constructed TREE_VEC node of length LEN. */
2266
2267 tree
2268 make_tree_vec_stat (int len MEM_STAT_DECL)
2269 {
2270 tree t;
2271 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2272
2273 record_node_allocation_statistics (TREE_VEC, length);
2274
2275 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2276
2277 TREE_SET_CODE (t, TREE_VEC);
2278 TREE_VEC_LENGTH (t) = len;
2279
2280 return t;
2281 }
2282
2283 /* Grow a TREE_VEC node to new length LEN. */
2284
2285 tree
2286 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2287 {
2288 gcc_assert (TREE_CODE (v) == TREE_VEC);
2289
2290 int oldlen = TREE_VEC_LENGTH (v);
2291 gcc_assert (len > oldlen);
2292
2293 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2294 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2295
2296 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2297
2298 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2299
2300 TREE_VEC_LENGTH (v) = len;
2301
2302 return v;
2303 }
2304 \f
2305 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2306 fixed, and scalar, complex or vector. */
2307
2308 int
2309 zerop (const_tree expr)
2310 {
2311 return (integer_zerop (expr)
2312 || real_zerop (expr)
2313 || fixed_zerop (expr));
2314 }
2315
2316 /* Return 1 if EXPR is the integer constant zero, a complex constant
2317 of zero, or a vector of zeros. */
2318
2319 int
2320 integer_zerop (const_tree expr)
2321 {
2322 switch (TREE_CODE (expr))
2323 {
2324 case INTEGER_CST:
2325 return wi::eq_p (expr, 0);
2326 case COMPLEX_CST:
2327 return (integer_zerop (TREE_REALPART (expr))
2328 && integer_zerop (TREE_IMAGPART (expr)));
2329 case VECTOR_CST:
2330 {
2331 unsigned i;
2332 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2333 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2334 return false;
2335 return true;
2336 }
2337 default:
2338 return false;
2339 }
2340 }
2341
2342 /* Return 1 if EXPR is the integer constant one, the corresponding
2343 complex constant, or a vector of ones. */
2344
2345 int
2346 integer_onep (const_tree expr)
2347 {
2348 switch (TREE_CODE (expr))
2349 {
2350 case INTEGER_CST:
2351 return wi::eq_p (wi::to_widest (expr), 1);
2352 case COMPLEX_CST:
2353 return (integer_onep (TREE_REALPART (expr))
2354 && integer_zerop (TREE_IMAGPART (expr)));
2355 case VECTOR_CST:
2356 {
2357 unsigned i;
2358 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2359 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2360 return false;
2361 return true;
2362 }
2363 default:
2364 return false;
2365 }
2366 }
2367
2368 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2369 return 1 if every piece is the integer constant one. */
2370
2371 int
2372 integer_each_onep (const_tree expr)
2373 {
2374 if (TREE_CODE (expr) == COMPLEX_CST)
2375 return (integer_onep (TREE_REALPART (expr))
2376 && integer_onep (TREE_IMAGPART (expr)));
2377 else
2378 return integer_onep (expr);
2379 }
2380
2381 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2382 it contains, or a complex or vector whose subparts are such integers. */
2383
2384 int
2385 integer_all_onesp (const_tree expr)
2386 {
2387 if (TREE_CODE (expr) == COMPLEX_CST
2388 && integer_all_onesp (TREE_REALPART (expr))
2389 && integer_all_onesp (TREE_IMAGPART (expr)))
2390 return 1;
2391
2392 else if (TREE_CODE (expr) == VECTOR_CST)
2393 {
2394 unsigned i;
2395 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2396 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2397 return 0;
2398 return 1;
2399 }
2400
2401 else if (TREE_CODE (expr) != INTEGER_CST)
2402 return 0;
2403
2404 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2405 }
2406
2407 /* Return 1 if EXPR is the integer constant minus one. */
2408
2409 int
2410 integer_minus_onep (const_tree expr)
2411 {
2412 if (TREE_CODE (expr) == COMPLEX_CST)
2413 return (integer_all_onesp (TREE_REALPART (expr))
2414 && integer_zerop (TREE_IMAGPART (expr)));
2415 else
2416 return integer_all_onesp (expr);
2417 }
2418
2419 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2420 one bit on). */
2421
2422 int
2423 integer_pow2p (const_tree expr)
2424 {
2425 if (TREE_CODE (expr) == COMPLEX_CST
2426 && integer_pow2p (TREE_REALPART (expr))
2427 && integer_zerop (TREE_IMAGPART (expr)))
2428 return 1;
2429
2430 if (TREE_CODE (expr) != INTEGER_CST)
2431 return 0;
2432
2433 return wi::popcount (expr) == 1;
2434 }
2435
2436 /* Return 1 if EXPR is an integer constant other than zero or a
2437 complex constant other than zero. */
2438
2439 int
2440 integer_nonzerop (const_tree expr)
2441 {
2442 return ((TREE_CODE (expr) == INTEGER_CST
2443 && !wi::eq_p (expr, 0))
2444 || (TREE_CODE (expr) == COMPLEX_CST
2445 && (integer_nonzerop (TREE_REALPART (expr))
2446 || integer_nonzerop (TREE_IMAGPART (expr)))));
2447 }
2448
2449 /* Return 1 if EXPR is the integer constant one. For vector,
2450 return 1 if every piece is the integer constant minus one
2451 (representing the value TRUE). */
2452
2453 int
2454 integer_truep (const_tree expr)
2455 {
2456 if (TREE_CODE (expr) == VECTOR_CST)
2457 return integer_all_onesp (expr);
2458 return integer_onep (expr);
2459 }
2460
2461 /* Return 1 if EXPR is the fixed-point constant zero. */
2462
2463 int
2464 fixed_zerop (const_tree expr)
2465 {
2466 return (TREE_CODE (expr) == FIXED_CST
2467 && TREE_FIXED_CST (expr).data.is_zero ());
2468 }
2469
2470 /* Return the power of two represented by a tree node known to be a
2471 power of two. */
2472
2473 int
2474 tree_log2 (const_tree expr)
2475 {
2476 if (TREE_CODE (expr) == COMPLEX_CST)
2477 return tree_log2 (TREE_REALPART (expr));
2478
2479 return wi::exact_log2 (expr);
2480 }
2481
2482 /* Similar, but return the largest integer Y such that 2 ** Y is less
2483 than or equal to EXPR. */
2484
2485 int
2486 tree_floor_log2 (const_tree expr)
2487 {
2488 if (TREE_CODE (expr) == COMPLEX_CST)
2489 return tree_log2 (TREE_REALPART (expr));
2490
2491 return wi::floor_log2 (expr);
2492 }
2493
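/* Worked example (a sketch, not used anywhere in the compiler):

     tree c8  = build_int_cst (integer_type_node, 8);
     tree c10 = build_int_cst (integer_type_node, 10);

   integer_pow2p (c8) is 1 and tree_log2 (c8) is 3, while
   integer_pow2p (c10) is 0 and tree_floor_log2 (c10) is 3; tree_log2 is
   only meaningful when the operand is known to be an exact power of two.  */
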
2494 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2495 EXPR is known to be zero, the precision of its type. */
2496
2497 unsigned int
2498 tree_ctz (const_tree expr)
2499 {
2500 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2501 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2502 return 0;
2503
2504 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2505 switch (TREE_CODE (expr))
2506 {
2507 case INTEGER_CST:
2508 ret1 = wi::ctz (expr);
2509 return MIN (ret1, prec);
2510 case SSA_NAME:
2511 ret1 = wi::ctz (get_nonzero_bits (expr));
2512 return MIN (ret1, prec);
2513 case PLUS_EXPR:
2514 case MINUS_EXPR:
2515 case BIT_IOR_EXPR:
2516 case BIT_XOR_EXPR:
2517 case MIN_EXPR:
2518 case MAX_EXPR:
2519 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2520 if (ret1 == 0)
2521 return ret1;
2522 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2523 return MIN (ret1, ret2);
2524 case POINTER_PLUS_EXPR:
2525 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2526 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2527 /* The second operand is sizetype, which could in theory be
2528 wider than the pointer's precision. Make sure we never
2529 return more than prec. */
2530 ret2 = MIN (ret2, prec);
2531 return MIN (ret1, ret2);
2532 case BIT_AND_EXPR:
2533 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2534 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2535 return MAX (ret1, ret2);
2536 case MULT_EXPR:
2537 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2538 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2539 return MIN (ret1 + ret2, prec);
2540 case LSHIFT_EXPR:
2541 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2542 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2543 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2544 {
2545 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2546 return MIN (ret1 + ret2, prec);
2547 }
2548 return ret1;
2549 case RSHIFT_EXPR:
2550 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2551 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2552 {
2553 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2554 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2555 if (ret1 > ret2)
2556 return ret1 - ret2;
2557 }
2558 return 0;
2559 case TRUNC_DIV_EXPR:
2560 case CEIL_DIV_EXPR:
2561 case FLOOR_DIV_EXPR:
2562 case ROUND_DIV_EXPR:
2563 case EXACT_DIV_EXPR:
2564 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2565 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2566 {
2567 int l = tree_log2 (TREE_OPERAND (expr, 1));
2568 if (l >= 0)
2569 {
2570 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2571 ret2 = l;
2572 if (ret1 > ret2)
2573 return ret1 - ret2;
2574 }
2575 }
2576 return 0;
2577 CASE_CONVERT:
2578 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2579 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2580 ret1 = prec;
2581 return MIN (ret1, prec);
2582 case SAVE_EXPR:
2583 return tree_ctz (TREE_OPERAND (expr, 0));
2584 case COND_EXPR:
2585 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2586 if (ret1 == 0)
2587 return 0;
2588 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2589 return MIN (ret1, ret2);
2590 case COMPOUND_EXPR:
2591 return tree_ctz (TREE_OPERAND (expr, 1));
2592 case ADDR_EXPR:
2593 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2594 if (ret1 > BITS_PER_UNIT)
2595 {
2596 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2597 return MIN (ret1, prec);
2598 }
2599 return 0;
2600 default:
2601 return 0;
2602 }
2603 }
2604
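/* Worked example (sketch): for a GENERIC expression of the form
   (i * 8) + 4 with an integer variable i, the MULT_EXPR case yields
   ctz (i) + ctz (8) >= 0 + 3 = 3 known trailing zero bits for the
   product, and the PLUS_EXPR case then takes MIN (3, ctz (4))
   = MIN (3, 2) = 2, so tree_ctz reports at least 2 known trailing zero
   bits for the whole sum.  */
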
2605 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2606 decimal float constants, so don't return 1 for them. */
2607
2608 int
2609 real_zerop (const_tree expr)
2610 {
2611 switch (TREE_CODE (expr))
2612 {
2613 case REAL_CST:
2614 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2616 case COMPLEX_CST:
2617 return real_zerop (TREE_REALPART (expr))
2618 && real_zerop (TREE_IMAGPART (expr));
2619 case VECTOR_CST:
2620 {
2621 unsigned i;
2622 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2623 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2624 return false;
2625 return true;
2626 }
2627 default:
2628 return false;
2629 }
2630 }
2631
2632 /* Return 1 if EXPR is the real constant one in real or complex form.
2633 Trailing zeroes matter for decimal float constants, so don't return
2634 1 for them. */
2635
2636 int
2637 real_onep (const_tree expr)
2638 {
2639 switch (TREE_CODE (expr))
2640 {
2641 case REAL_CST:
2642 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2643 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2644 case COMPLEX_CST:
2645 return real_onep (TREE_REALPART (expr))
2646 && real_zerop (TREE_IMAGPART (expr));
2647 case VECTOR_CST:
2648 {
2649 unsigned i;
2650 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2651 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2652 return false;
2653 return true;
2654 }
2655 default:
2656 return false;
2657 }
2658 }
2659
2660 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2661 matter for decimal float constants, so don't return 1 for them. */
2662
2663 int
2664 real_minus_onep (const_tree expr)
2665 {
2666 switch (TREE_CODE (expr))
2667 {
2668 case REAL_CST:
2669 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2670 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2671 case COMPLEX_CST:
2672 return real_minus_onep (TREE_REALPART (expr))
2673 && real_zerop (TREE_IMAGPART (expr));
2674 case VECTOR_CST:
2675 {
2676 unsigned i;
2677 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2678 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2679 return false;
2680 return true;
2681 }
2682 default:
2683 return false;
2684 }
2685 }
2686
2687 /* Nonzero if EXP is a constant or a cast of a constant. */
2688
2689 int
2690 really_constant_p (const_tree exp)
2691 {
2692 /* This is not quite the same as STRIP_NOPS. It does more. */
2693 while (CONVERT_EXPR_P (exp)
2694 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2695 exp = TREE_OPERAND (exp, 0);
2696 return TREE_CONSTANT (exp);
2697 }
2698 \f
2699 /* Return first list element whose TREE_VALUE is ELEM.
2700 Return 0 if ELEM is not in LIST. */
2701
2702 tree
2703 value_member (tree elem, tree list)
2704 {
2705 while (list)
2706 {
2707 if (elem == TREE_VALUE (list))
2708 return list;
2709 list = TREE_CHAIN (list);
2710 }
2711 return NULL_TREE;
2712 }
2713
2714 /* Return first list element whose TREE_PURPOSE is ELEM.
2715 Return 0 if ELEM is not in LIST. */
2716
2717 tree
2718 purpose_member (const_tree elem, tree list)
2719 {
2720 while (list)
2721 {
2722 if (elem == TREE_PURPOSE (list))
2723 return list;
2724 list = TREE_CHAIN (list);
2725 }
2726 return NULL_TREE;
2727 }
2728
2729 /* Return true if ELEM is in V. */
2730
2731 bool
2732 vec_member (const_tree elem, vec<tree, va_gc> *v)
2733 {
2734 unsigned ix;
2735 tree t;
2736 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2737 if (elem == t)
2738 return true;
2739 return false;
2740 }
2741
2742 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2743 NULL_TREE if CHAIN does not contain that many elements. */
2744
2745 tree
2746 chain_index (int idx, tree chain)
2747 {
2748 for (; chain && idx > 0; --idx)
2749 chain = TREE_CHAIN (chain);
2750 return chain;
2751 }
2752
2753 /* Return nonzero if ELEM is part of the chain CHAIN. */
2754
2755 int
2756 chain_member (const_tree elem, const_tree chain)
2757 {
2758 while (chain)
2759 {
2760 if (elem == chain)
2761 return 1;
2762 chain = DECL_CHAIN (chain);
2763 }
2764
2765 return 0;
2766 }
2767
2768 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2769 We expect a null pointer to mark the end of the chain.
2770 This is the Lisp primitive `length'. */
2771
2772 int
2773 list_length (const_tree t)
2774 {
2775 const_tree p = t;
2776 #ifdef ENABLE_TREE_CHECKING
2777 const_tree q = t;
2778 #endif
2779 int len = 0;
2780
2781 while (p)
2782 {
2783 p = TREE_CHAIN (p);
2784 #ifdef ENABLE_TREE_CHECKING
2785 if (len % 2)
2786 q = TREE_CHAIN (q);
2787 gcc_assert (p != q);
2788 #endif
2789 len++;
2790 }
2791
2792 return len;
2793 }
2794
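/* Example (sketch): for the three-element list

     tree l = tree_cons (NULL_TREE, integer_zero_node,
                         tree_cons (NULL_TREE, integer_one_node,
                                    build_tree_list (NULL_TREE,
                                                     integer_zero_node)));

   list_length (l) returns 3.  The checking variant advances a second
   pointer at half speed so that a circular chain triggers an assertion
   instead of an endless loop.  */
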
2795 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2796 UNION_TYPE TYPE, or NULL_TREE if none. */
2797
2798 tree
2799 first_field (const_tree type)
2800 {
2801 tree t = TYPE_FIELDS (type);
2802 while (t && TREE_CODE (t) != FIELD_DECL)
2803 t = TREE_CHAIN (t);
2804 return t;
2805 }
2806
2807 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2808 by modifying the last node in chain 1 to point to chain 2.
2809 This is the Lisp primitive `nconc'. */
2810
2811 tree
2812 chainon (tree op1, tree op2)
2813 {
2814 tree t1;
2815
2816 if (!op1)
2817 return op2;
2818 if (!op2)
2819 return op1;
2820
2821 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2822 continue;
2823 TREE_CHAIN (t1) = op2;
2824
2825 #ifdef ENABLE_TREE_CHECKING
2826 {
2827 tree t2;
2828 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2829 gcc_assert (t2 != t1);
2830 }
2831 #endif
2832
2833 return op1;
2834 }
2835
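/* Example (sketch): given two decl chains a -> b and c -> d,
   chainon (a, c) destructively sets TREE_CHAIN (b) to c and returns a,
   yielding a -> b -> c -> d, while chainon (NULL_TREE, c) simply
   returns c.  With tree checking enabled, appending a chain to itself
   is caught by the circularity assert above.  */
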
2836 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2837
2838 tree
2839 tree_last (tree chain)
2840 {
2841 tree next;
2842 if (chain)
2843 while ((next = TREE_CHAIN (chain)))
2844 chain = next;
2845 return chain;
2846 }
2847
2848 /* Reverse the order of elements in the chain T,
2849 and return the new head of the chain (old last element). */
2850
2851 tree
2852 nreverse (tree t)
2853 {
2854 tree prev = 0, decl, next;
2855 for (decl = t; decl; decl = next)
2856 {
2857 /* We shouldn't be using this function to reverse BLOCK chains; we
2858 have blocks_nreverse for that. */
2859 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2860 next = TREE_CHAIN (decl);
2861 TREE_CHAIN (decl) = prev;
2862 prev = decl;
2863 }
2864 return prev;
2865 }
2866 \f
2867 /* Return a newly created TREE_LIST node whose
2868 purpose and value fields are PARM and VALUE. */
2869
2870 tree
2871 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2872 {
2873 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2874 TREE_PURPOSE (t) = parm;
2875 TREE_VALUE (t) = value;
2876 return t;
2877 }
2878
2879 /* Build a chain of TREE_LIST nodes from a vector. */
2880
2881 tree
2882 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2883 {
2884 tree ret = NULL_TREE;
2885 tree *pp = &ret;
2886 unsigned int i;
2887 tree t;
2888 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2889 {
2890 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2891 pp = &TREE_CHAIN (*pp);
2892 }
2893 return ret;
2894 }
2895
2896 /* Return a newly created TREE_LIST node whose
2897 purpose and value fields are PURPOSE and VALUE
2898 and whose TREE_CHAIN is CHAIN. */
2899
2900 tree
2901 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2902 {
2903 tree node;
2904
2905 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2906 memset (node, 0, sizeof (struct tree_common));
2907
2908 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2909
2910 TREE_SET_CODE (node, TREE_LIST);
2911 TREE_CHAIN (node) = chain;
2912 TREE_PURPOSE (node) = purpose;
2913 TREE_VALUE (node) = value;
2914 return node;
2915 }
2916
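/* Example (a sketch of one typical client, attribute lists): the
   attribute aligned (16) can be represented as

     tree args = build_tree_list (NULL_TREE,
                                  build_int_cst (unsigned_type_node, 16));
     tree attr = tree_cons (get_identifier ("aligned"), args, NULL_TREE);

   with TREE_PURPOSE holding the attribute name and TREE_VALUE its
   argument list.  */
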
2917 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2918 trees. */
2919
2920 vec<tree, va_gc> *
2921 ctor_to_vec (tree ctor)
2922 {
2923 vec<tree, va_gc> *vec;
2924 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2925 unsigned int ix;
2926 tree val;
2927
2928 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2929 vec->quick_push (val);
2930
2931 return vec;
2932 }
2933 \f
2934 /* Return the size nominally occupied by an object of type TYPE
2935 when it resides in memory. The value is measured in units of bytes,
2936 and its data type is that normally used for type sizes
2937 (which is the first type created by make_signed_type or
2938 make_unsigned_type). */
2939
2940 tree
2941 size_in_bytes_loc (location_t loc, const_tree type)
2942 {
2943 tree t;
2944
2945 if (type == error_mark_node)
2946 return integer_zero_node;
2947
2948 type = TYPE_MAIN_VARIANT (type);
2949 t = TYPE_SIZE_UNIT (type);
2950
2951 if (t == 0)
2952 {
2953 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2954 return size_zero_node;
2955 }
2956
2957 return t;
2958 }
2959
2960 /* Return the size of TYPE (in bytes) as a wide integer
2961 or return -1 if the size can vary or is larger than an integer. */
2962
2963 HOST_WIDE_INT
2964 int_size_in_bytes (const_tree type)
2965 {
2966 tree t;
2967
2968 if (type == error_mark_node)
2969 return 0;
2970
2971 type = TYPE_MAIN_VARIANT (type);
2972 t = TYPE_SIZE_UNIT (type);
2973
2974 if (t && tree_fits_uhwi_p (t))
2975 return TREE_INT_CST_LOW (t);
2976 else
2977 return -1;
2978 }
2979
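/* Example (sketch): int_size_in_bytes (integer_type_node) yields the
   target's sizeof (int), e.g. 4 on most targets, whereas for a
   variable-length array type whose TYPE_SIZE_UNIT is not a compile-time
   constant it returns -1 and callers must fall back to the tree
   returned by size_in_bytes_loc.  */
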
2980 /* Return the maximum size of TYPE (in bytes) as a wide integer
2981 or return -1 if the size can vary or is larger than an integer. */
2982
2983 HOST_WIDE_INT
2984 max_int_size_in_bytes (const_tree type)
2985 {
2986 HOST_WIDE_INT size = -1;
2987 tree size_tree;
2988
2989 /* If this is an array type, check for a possible MAX_SIZE attached. */
2990
2991 if (TREE_CODE (type) == ARRAY_TYPE)
2992 {
2993 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2994
2995 if (size_tree && tree_fits_uhwi_p (size_tree))
2996 size = tree_to_uhwi (size_tree);
2997 }
2998
2999 /* If we still haven't been able to get a size, see if the language
3000 can compute a maximum size. */
3001
3002 if (size == -1)
3003 {
3004 size_tree = lang_hooks.types.max_size (type);
3005
3006 if (size_tree && tree_fits_uhwi_p (size_tree))
3007 size = tree_to_uhwi (size_tree);
3008 }
3009
3010 return size;
3011 }
3012 \f
3013 /* Return the bit position of FIELD, in bits from the start of the record.
3014 This is a tree of type bitsizetype. */
3015
3016 tree
3017 bit_position (const_tree field)
3018 {
3019 return bit_from_pos (DECL_FIELD_OFFSET (field),
3020 DECL_FIELD_BIT_OFFSET (field));
3021 }
3022 \f
3023 /* Return the byte position of FIELD, in bytes from the start of the record.
3024 This is a tree of type sizetype. */
3025
3026 tree
3027 byte_position (const_tree field)
3028 {
3029 return byte_from_pos (DECL_FIELD_OFFSET (field),
3030 DECL_FIELD_BIT_OFFSET (field));
3031 }
3032
3033 /* Likewise, but return as an integer. It must be representable in
3034 that way (since it could be a signed value, we don't have the
3035 option of returning -1 like int_size_in_bytes can). */
3036
3037 HOST_WIDE_INT
3038 int_byte_position (const_tree field)
3039 {
3040 return tree_to_shwi (byte_position (field));
3041 }
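
/* Example (sketch): for the second field of

     struct { char c; int i; };

   laid out with the usual 4-byte alignment for int, byte_position of
   the FIELD_DECL for i is the sizetype constant 4, bit_position is the
   bitsizetype constant 32, and int_byte_position returns 4 as a
   HOST_WIDE_INT.  */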
3042 \f
3043 /* Return the strictest alignment, in bits, that T is known to have. */
3044
3045 unsigned int
3046 expr_align (const_tree t)
3047 {
3048 unsigned int align0, align1;
3049
3050 switch (TREE_CODE (t))
3051 {
3052 CASE_CONVERT: case NON_LVALUE_EXPR:
3053 /* If we have conversions, we know that the alignment of the
3054 object must meet each of the alignments of the types. */
3055 align0 = expr_align (TREE_OPERAND (t, 0));
3056 align1 = TYPE_ALIGN (TREE_TYPE (t));
3057 return MAX (align0, align1);
3058
3059 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3060 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3061 case CLEANUP_POINT_EXPR:
3062 /* These don't change the alignment of an object. */
3063 return expr_align (TREE_OPERAND (t, 0));
3064
3065 case COND_EXPR:
3066 /* The best we can do is say that the alignment is the minimum of
3067 the alignments of the two arms. */
3068 align0 = expr_align (TREE_OPERAND (t, 1));
3069 align1 = expr_align (TREE_OPERAND (t, 2));
3070 return MIN (align0, align1);
3071
3072 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3073 meaningfully, it's always 1. */
3074 case LABEL_DECL: case CONST_DECL:
3075 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3076 case FUNCTION_DECL:
3077 gcc_assert (DECL_ALIGN (t) != 0);
3078 return DECL_ALIGN (t);
3079
3080 default:
3081 break;
3082 }
3083
3084 /* Otherwise take the alignment from that of the type. */
3085 return TYPE_ALIGN (TREE_TYPE (t));
3086 }
3087 \f
3088 /* Return, as a tree node, the number of elements for TYPE (which is an
3089 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3090
3091 tree
3092 array_type_nelts (const_tree type)
3093 {
3094 tree index_type, min, max;
3095
3096 /* If they did it with unspecified bounds, then we should have already
3097 given an error about it before we got here. */
3098 if (! TYPE_DOMAIN (type))
3099 return error_mark_node;
3100
3101 index_type = TYPE_DOMAIN (type);
3102 min = TYPE_MIN_VALUE (index_type);
3103 max = TYPE_MAX_VALUE (index_type);
3104
3105 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3106 if (!max)
3107 return error_mark_node;
3108
3109 return (integer_zerop (min)
3110 ? max
3111 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3112 }
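
/* Example (sketch): for the C type int[10], TYPE_DOMAIN runs from 0 to
   9, so array_type_nelts returns the INTEGER_CST 9 (the minimum is
   zero, so the maximum is returned directly); for int[5][10] it returns
   4, counting only the outermost dimension.  */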
3113 \f
3114 /* If arg is static -- a reference to an object in static storage -- then
3115 return the object. This is not the same as the C meaning of `static'.
3116 If arg isn't static, return NULL. */
3117
3118 tree
3119 staticp (tree arg)
3120 {
3121 switch (TREE_CODE (arg))
3122 {
3123 case FUNCTION_DECL:
3124 /* Nested functions are static, even though taking their address will
3125 involve a trampoline as we unnest the nested function and create
3126 the trampoline on the tree level. */
3127 return arg;
3128
3129 case VAR_DECL:
3130 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3131 && ! DECL_THREAD_LOCAL_P (arg)
3132 && ! DECL_DLLIMPORT_P (arg)
3133 ? arg : NULL);
3134
3135 case CONST_DECL:
3136 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3137 ? arg : NULL);
3138
3139 case CONSTRUCTOR:
3140 return TREE_STATIC (arg) ? arg : NULL;
3141
3142 case LABEL_DECL:
3143 case STRING_CST:
3144 return arg;
3145
3146 case COMPONENT_REF:
3147 /* If the thing being referenced is not a field, then it is
3148 something language specific. */
3149 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3150
3151 /* If we are referencing a bitfield, we can't evaluate an
3152 ADDR_EXPR at compile time and so it isn't a constant. */
3153 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3154 return NULL;
3155
3156 return staticp (TREE_OPERAND (arg, 0));
3157
3158 case BIT_FIELD_REF:
3159 return NULL;
3160
3161 case INDIRECT_REF:
3162 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3163
3164 case ARRAY_REF:
3165 case ARRAY_RANGE_REF:
3166 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3167 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3168 return staticp (TREE_OPERAND (arg, 0));
3169 else
3170 return NULL;
3171
3172 case COMPOUND_LITERAL_EXPR:
3173 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3174
3175 default:
3176 return NULL;
3177 }
3178 }
3179
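/* Examples (sketch): staticp returns the argument itself for a
   STRING_CST or for a file-scope VAR_DECL with TREE_STATIC set, returns
   the underlying object for a COMPONENT_REF such as s.f when s is such
   a variable and f is not a bit-field, and returns NULL for automatic
   locals, bit-field references and anything else whose address cannot
   be computed at compile time.  */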
3180 \f
3181
3182
3183 /* Return whether OP is a DECL whose address is function-invariant. */
3184
3185 bool
3186 decl_address_invariant_p (const_tree op)
3187 {
3188 /* The conditions below are slightly less strict than those in
3189 staticp. */
3190
3191 switch (TREE_CODE (op))
3192 {
3193 case PARM_DECL:
3194 case RESULT_DECL:
3195 case LABEL_DECL:
3196 case FUNCTION_DECL:
3197 return true;
3198
3199 case VAR_DECL:
3200 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3201 || DECL_THREAD_LOCAL_P (op)
3202 || DECL_CONTEXT (op) == current_function_decl
3203 || decl_function_context (op) == current_function_decl)
3204 return true;
3205 break;
3206
3207 case CONST_DECL:
3208 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3209 || decl_function_context (op) == current_function_decl)
3210 return true;
3211 break;
3212
3213 default:
3214 break;
3215 }
3216
3217 return false;
3218 }
3219
3220 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3221
3222 bool
3223 decl_address_ip_invariant_p (const_tree op)
3224 {
3225 /* The conditions below are slightly less strict than those in
3226 staticp. */
3227
3228 switch (TREE_CODE (op))
3229 {
3230 case LABEL_DECL:
3231 case FUNCTION_DECL:
3232 case STRING_CST:
3233 return true;
3234
3235 case VAR_DECL:
3236 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3237 && !DECL_DLLIMPORT_P (op))
3238 || DECL_THREAD_LOCAL_P (op))
3239 return true;
3240 break;
3241
3242 case CONST_DECL:
3243 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3244 return true;
3245 break;
3246
3247 default:
3248 break;
3249 }
3250
3251 return false;
3252 }
3253
3254
3255 /* Return true if T is function-invariant (internal function, does
3256 not handle arithmetic; that's handled in skip_simple_arithmetic and
3257 tree_invariant_p). */
3258
3259 static bool
3260 tree_invariant_p_1 (tree t)
3261 {
3262 tree op;
3263
3264 if (TREE_CONSTANT (t)
3265 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3266 return true;
3267
3268 switch (TREE_CODE (t))
3269 {
3270 case SAVE_EXPR:
3271 return true;
3272
3273 case ADDR_EXPR:
3274 op = TREE_OPERAND (t, 0);
3275 while (handled_component_p (op))
3276 {
3277 switch (TREE_CODE (op))
3278 {
3279 case ARRAY_REF:
3280 case ARRAY_RANGE_REF:
3281 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3282 || TREE_OPERAND (op, 2) != NULL_TREE
3283 || TREE_OPERAND (op, 3) != NULL_TREE)
3284 return false;
3285 break;
3286
3287 case COMPONENT_REF:
3288 if (TREE_OPERAND (op, 2) != NULL_TREE)
3289 return false;
3290 break;
3291
3292 default:;
3293 }
3294 op = TREE_OPERAND (op, 0);
3295 }
3296
3297 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3298
3299 default:
3300 break;
3301 }
3302
3303 return false;
3304 }
3305
3306 /* Return true if T is function-invariant. */
3307
3308 bool
3309 tree_invariant_p (tree t)
3310 {
3311 tree inner = skip_simple_arithmetic (t);
3312 return tree_invariant_p_1 (inner);
3313 }
3314
3315 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3316 Do this to any expression which may be used in more than one place,
3317 but must be evaluated only once.
3318
3319 Normally, expand_expr would reevaluate the expression each time.
3320 Calling save_expr produces something that is evaluated and recorded
3321 the first time expand_expr is called on it. Subsequent calls to
3322 expand_expr just reuse the recorded value.
3323
3324 The call to expand_expr that generates code that actually computes
3325 the value is the first call *at compile time*. Subsequent calls
3326 *at compile time* generate code to use the saved value.
3327 This produces the correct result provided that *at run time* control
3328 always flows through the insns made by the first expand_expr
3329 before reaching the other places where the save_expr was evaluated.
3330 You, the caller of save_expr, must make sure this is so.
3331
3332 Constants, and certain read-only nodes, are returned with no
3333 SAVE_EXPR because that is safe. Expressions containing placeholders
3334 are not touched; see tree.def for an explanation of what these
3335 are used for. */
3336
3337 tree
3338 save_expr (tree expr)
3339 {
3340 tree t = fold (expr);
3341 tree inner;
3342
3343 /* If the tree evaluates to a constant, then we don't want to hide that
3344 fact (i.e. this allows further folding, and direct checks for constants).
3345 However, a read-only object that has side effects cannot be bypassed.
3346 Since it is no problem to reevaluate literals, we just return the
3347 literal node. */
3348 inner = skip_simple_arithmetic (t);
3349 if (TREE_CODE (inner) == ERROR_MARK)
3350 return inner;
3351
3352 if (tree_invariant_p_1 (inner))
3353 return t;
3354
3355 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3356 it means that the size or offset of some field of an object depends on
3357 the value within another field.
3358
3359 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3360 and some variable since it would then need to be both evaluated once and
3361 evaluated more than once. Front-ends must ensure this case cannot
3362 happen by surrounding any such subexpressions in their own SAVE_EXPR
3363 and forcing evaluation at the proper time. */
3364 if (contains_placeholder_p (inner))
3365 return t;
3366
3367 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3368 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3369
3370 /* This expression might be placed ahead of a jump to ensure that the
3371 value was computed on both sides of the jump. So make sure it isn't
3372 eliminated as dead. */
3373 TREE_SIDE_EFFECTS (t) = 1;
3374 return t;
3375 }
3376
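/* Example (sketch, with INDEX_EXPR standing for some already built index
   expression): when a front end needs the same index twice, as in the
   expansion of an expression like a[f ()] += 1, wrapping it as

     tree idx = save_expr (index_expr);

   guarantees that its side effects are evaluated only once and that the
   SAVE_EXPR is reused for both the read and the write, while a constant
   or otherwise invariant index is returned unchanged, without a wrapper.  */
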
3377 /* Look inside EXPR into any simple arithmetic operations. Return the
3378 outermost non-arithmetic or non-invariant node. */
3379
3380 tree
3381 skip_simple_arithmetic (tree expr)
3382 {
3383 /* We don't care about whether this can be used as an lvalue in this
3384 context. */
3385 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3386 expr = TREE_OPERAND (expr, 0);
3387
3388 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3389 a constant, it will be more efficient to not make another SAVE_EXPR since
3390 it will allow better simplification and GCSE will be able to merge the
3391 computations if they actually occur. */
3392 while (true)
3393 {
3394 if (UNARY_CLASS_P (expr))
3395 expr = TREE_OPERAND (expr, 0);
3396 else if (BINARY_CLASS_P (expr))
3397 {
3398 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3399 expr = TREE_OPERAND (expr, 0);
3400 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3401 expr = TREE_OPERAND (expr, 1);
3402 else
3403 break;
3404 }
3405 else
3406 break;
3407 }
3408
3409 return expr;
3410 }
3411
3412 /* Look inside EXPR into simple arithmetic operations involving constants.
3413 Return the outermost non-arithmetic or non-constant node. */
3414
3415 tree
3416 skip_simple_constant_arithmetic (tree expr)
3417 {
3418 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3419 expr = TREE_OPERAND (expr, 0);
3420
3421 while (true)
3422 {
3423 if (UNARY_CLASS_P (expr))
3424 expr = TREE_OPERAND (expr, 0);
3425 else if (BINARY_CLASS_P (expr))
3426 {
3427 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3428 expr = TREE_OPERAND (expr, 0);
3429 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3430 expr = TREE_OPERAND (expr, 1);
3431 else
3432 break;
3433 }
3434 else
3435 break;
3436 }
3437
3438 return expr;
3439 }
3440
3441 /* Return which tree structure is used by T. */
3442
3443 enum tree_node_structure_enum
3444 tree_node_structure (const_tree t)
3445 {
3446 const enum tree_code code = TREE_CODE (t);
3447 return tree_node_structure_for_code (code);
3448 }
3449
3450 /* Set various status flags when building a CALL_EXPR object T. */
3451
3452 static void
3453 process_call_operands (tree t)
3454 {
3455 bool side_effects = TREE_SIDE_EFFECTS (t);
3456 bool read_only = false;
3457 int i = call_expr_flags (t);
3458
3459 /* Calls have side-effects, except those to const or pure functions. */
3460 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3461 side_effects = true;
3462 /* Propagate TREE_READONLY of arguments for const functions. */
3463 if (i & ECF_CONST)
3464 read_only = true;
3465
3466 if (!side_effects || read_only)
3467 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3468 {
3469 tree op = TREE_OPERAND (t, i);
3470 if (op && TREE_SIDE_EFFECTS (op))
3471 side_effects = true;
3472 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3473 read_only = false;
3474 }
3475
3476 TREE_SIDE_EFFECTS (t) = side_effects;
3477 TREE_READONLY (t) = read_only;
3478 }
3479 \f
3480 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3481 size or offset that depends on a field within a record. */
3482
3483 bool
3484 contains_placeholder_p (const_tree exp)
3485 {
3486 enum tree_code code;
3487
3488 if (!exp)
3489 return 0;
3490
3491 code = TREE_CODE (exp);
3492 if (code == PLACEHOLDER_EXPR)
3493 return 1;
3494
3495 switch (TREE_CODE_CLASS (code))
3496 {
3497 case tcc_reference:
3498 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3499 position computations since they will be converted into a
3500 WITH_RECORD_EXPR involving the reference, which we assume
3501 here will be valid. */
3502 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3503
3504 case tcc_exceptional:
3505 if (code == TREE_LIST)
3506 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3507 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3508 break;
3509
3510 case tcc_unary:
3511 case tcc_binary:
3512 case tcc_comparison:
3513 case tcc_expression:
3514 switch (code)
3515 {
3516 case COMPOUND_EXPR:
3517 /* Ignoring the first operand isn't quite right, but works best. */
3518 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3519
3520 case COND_EXPR:
3521 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3522 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3523 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3524
3525 case SAVE_EXPR:
3526 /* The save_expr function never wraps anything containing
3527 a PLACEHOLDER_EXPR. */
3528 return 0;
3529
3530 default:
3531 break;
3532 }
3533
3534 switch (TREE_CODE_LENGTH (code))
3535 {
3536 case 1:
3537 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3538 case 2:
3539 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3540 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3541 default:
3542 return 0;
3543 }
3544
3545 case tcc_vl_exp:
3546 switch (code)
3547 {
3548 case CALL_EXPR:
3549 {
3550 const_tree arg;
3551 const_call_expr_arg_iterator iter;
3552 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3553 if (CONTAINS_PLACEHOLDER_P (arg))
3554 return 1;
3555 return 0;
3556 }
3557 default:
3558 return 0;
3559 }
3560
3561 default:
3562 return 0;
3563 }
3564 return 0;
3565 }
3566
3567 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3568 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3569 field positions. */
3570
3571 static bool
3572 type_contains_placeholder_1 (const_tree type)
3573 {
3574 /* If the size contains a placeholder or the parent type (component type in
3575 the case of arrays) involves a placeholder, this type does. */
3576 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3577 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3578 || (!POINTER_TYPE_P (type)
3579 && TREE_TYPE (type)
3580 && type_contains_placeholder_p (TREE_TYPE (type))))
3581 return true;
3582
3583 /* Now do type-specific checks. Note that the last part of the check above
3584 greatly limits what we have to do below. */
3585 switch (TREE_CODE (type))
3586 {
3587 case VOID_TYPE:
3588 case POINTER_BOUNDS_TYPE:
3589 case COMPLEX_TYPE:
3590 case ENUMERAL_TYPE:
3591 case BOOLEAN_TYPE:
3592 case POINTER_TYPE:
3593 case OFFSET_TYPE:
3594 case REFERENCE_TYPE:
3595 case METHOD_TYPE:
3596 case FUNCTION_TYPE:
3597 case VECTOR_TYPE:
3598 case NULLPTR_TYPE:
3599 return false;
3600
3601 case INTEGER_TYPE:
3602 case REAL_TYPE:
3603 case FIXED_POINT_TYPE:
3604 /* Here we just check the bounds. */
3605 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3606 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3607
3608 case ARRAY_TYPE:
3609 /* We have already checked the component type above, so just check
3610 the domain type. Flexible array members have a null domain. */
3611 return TYPE_DOMAIN (type) ?
3612 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3613
3614 case RECORD_TYPE:
3615 case UNION_TYPE:
3616 case QUAL_UNION_TYPE:
3617 {
3618 tree field;
3619
3620 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3621 if (TREE_CODE (field) == FIELD_DECL
3622 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3623 || (TREE_CODE (type) == QUAL_UNION_TYPE
3624 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3625 || type_contains_placeholder_p (TREE_TYPE (field))))
3626 return true;
3627
3628 return false;
3629 }
3630
3631 default:
3632 gcc_unreachable ();
3633 }
3634 }
3635
3636 /* Wrapper around above function used to cache its result. */
3637
3638 bool
3639 type_contains_placeholder_p (tree type)
3640 {
3641 bool result;
3642
3643 /* If the contains_placeholder_bits field has been initialized,
3644 then we know the answer. */
3645 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3646 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3647
3648 /* Indicate that we've seen this type node, and the answer is false.
3649 This is what we want to return if we run into recursion via fields. */
3650 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3651
3652 /* Compute the real value. */
3653 result = type_contains_placeholder_1 (type);
3654
3655 /* Store the real value. */
3656 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3657
3658 return result;
3659 }
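
/* Example (sketch): the canonical client is a self-referential type such
   as an Ada discriminated record whose array component is bounded by a
   discriminant of the enclosing object.  The component's TYPE_SIZE then
   contains a PLACEHOLDER_EXPR, type_contains_placeholder_p returns true
   for it, and SUBSTITUTE_PLACEHOLDER_IN_EXPR later replaces the
   placeholder with a reference to a concrete object.  */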
3660 \f
3661 /* Push tree EXP onto vector QUEUE if it is not already present. */
3662
3663 static void
3664 push_without_duplicates (tree exp, vec<tree> *queue)
3665 {
3666 unsigned int i;
3667 tree iter;
3668
3669 FOR_EACH_VEC_ELT (*queue, i, iter)
3670 if (simple_cst_equal (iter, exp) == 1)
3671 break;
3672
3673 if (!iter)
3674 queue->safe_push (exp);
3675 }
3676
3677 /* Given a tree EXP, find all occurrences of references to fields
3678 in a PLACEHOLDER_EXPR and place them in vector REFS without
3679 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3680 we assume here that EXP contains only arithmetic expressions
3681 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3682 argument list. */
3683
3684 void
3685 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3686 {
3687 enum tree_code code = TREE_CODE (exp);
3688 tree inner;
3689 int i;
3690
3691 /* We handle TREE_LIST and COMPONENT_REF separately. */
3692 if (code == TREE_LIST)
3693 {
3694 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3695 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3696 }
3697 else if (code == COMPONENT_REF)
3698 {
3699 for (inner = TREE_OPERAND (exp, 0);
3700 REFERENCE_CLASS_P (inner);
3701 inner = TREE_OPERAND (inner, 0))
3702 ;
3703
3704 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3705 push_without_duplicates (exp, refs);
3706 else
3707 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3708 }
3709 else
3710 switch (TREE_CODE_CLASS (code))
3711 {
3712 case tcc_constant:
3713 break;
3714
3715 case tcc_declaration:
3716 /* Variables allocated to static storage can stay. */
3717 if (!TREE_STATIC (exp))
3718 push_without_duplicates (exp, refs);
3719 break;
3720
3721 case tcc_expression:
3722 /* This is the pattern built in ada/make_aligning_type. */
3723 if (code == ADDR_EXPR
3724 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3725 {
3726 push_without_duplicates (exp, refs);
3727 break;
3728 }
3729
3730 /* Fall through... */
3731
3732 case tcc_exceptional:
3733 case tcc_unary:
3734 case tcc_binary:
3735 case tcc_comparison:
3736 case tcc_reference:
3737 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3738 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3739 break;
3740
3741 case tcc_vl_exp:
3742 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3743 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3744 break;
3745
3746 default:
3747 gcc_unreachable ();
3748 }
3749 }
3750
3751 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3752 return a tree with all occurrences of references to F in a
3753 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3754 CONST_DECLs. Note that we assume here that EXP contains only
3755 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3756 occurring only in their argument list. */
3757
3758 tree
3759 substitute_in_expr (tree exp, tree f, tree r)
3760 {
3761 enum tree_code code = TREE_CODE (exp);
3762 tree op0, op1, op2, op3;
3763 tree new_tree;
3764
3765 /* We handle TREE_LIST and COMPONENT_REF separately. */
3766 if (code == TREE_LIST)
3767 {
3768 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3769 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3770 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3771 return exp;
3772
3773 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3774 }
3775 else if (code == COMPONENT_REF)
3776 {
3777 tree inner;
3778
3779 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3780 and it is the right field, replace it with R. */
3781 for (inner = TREE_OPERAND (exp, 0);
3782 REFERENCE_CLASS_P (inner);
3783 inner = TREE_OPERAND (inner, 0))
3784 ;
3785
3786 /* The field. */
3787 op1 = TREE_OPERAND (exp, 1);
3788
3789 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3790 return r;
3791
3792 /* If this expression hasn't been completed yet, leave it alone. */
3793 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3794 return exp;
3795
3796 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3797 if (op0 == TREE_OPERAND (exp, 0))
3798 return exp;
3799
3800 new_tree
3801 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3802 }
3803 else
3804 switch (TREE_CODE_CLASS (code))
3805 {
3806 case tcc_constant:
3807 return exp;
3808
3809 case tcc_declaration:
3810 if (exp == f)
3811 return r;
3812 else
3813 return exp;
3814
3815 case tcc_expression:
3816 if (exp == f)
3817 return r;
3818
3819 /* Fall through... */
3820
3821 case tcc_exceptional:
3822 case tcc_unary:
3823 case tcc_binary:
3824 case tcc_comparison:
3825 case tcc_reference:
3826 switch (TREE_CODE_LENGTH (code))
3827 {
3828 case 0:
3829 return exp;
3830
3831 case 1:
3832 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3833 if (op0 == TREE_OPERAND (exp, 0))
3834 return exp;
3835
3836 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3837 break;
3838
3839 case 2:
3840 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3841 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3842
3843 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3844 return exp;
3845
3846 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3847 break;
3848
3849 case 3:
3850 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3851 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3852 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3853
3854 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3855 && op2 == TREE_OPERAND (exp, 2))
3856 return exp;
3857
3858 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3859 break;
3860
3861 case 4:
3862 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3863 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3864 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3865 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3866
3867 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3868 && op2 == TREE_OPERAND (exp, 2)
3869 && op3 == TREE_OPERAND (exp, 3))
3870 return exp;
3871
3872 new_tree
3873 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3874 break;
3875
3876 default:
3877 gcc_unreachable ();
3878 }
3879 break;
3880
3881 case tcc_vl_exp:
3882 {
3883 int i;
3884
3885 new_tree = NULL_TREE;
3886
3887 /* If we are trying to replace F with a constant, inline back
3888 functions which do nothing other than compute a value from
3889 the arguments they are passed. This makes it possible to
3890 fold partially or entirely the replacement expression. */
3891 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3892 {
3893 tree t = maybe_inline_call_in_expr (exp);
3894 if (t)
3895 return SUBSTITUTE_IN_EXPR (t, f, r);
3896 }
3897
3898 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3899 {
3900 tree op = TREE_OPERAND (exp, i);
3901 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3902 if (new_op != op)
3903 {
3904 if (!new_tree)
3905 new_tree = copy_node (exp);
3906 TREE_OPERAND (new_tree, i) = new_op;
3907 }
3908 }
3909
3910 if (new_tree)
3911 {
3912 new_tree = fold (new_tree);
3913 if (TREE_CODE (new_tree) == CALL_EXPR)
3914 process_call_operands (new_tree);
3915 }
3916 else
3917 return exp;
3918 }
3919 break;
3920
3921 default:
3922 gcc_unreachable ();
3923 }
3924
3925 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3926
3927 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3928 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3929
3930 return new_tree;
3931 }
3932
3933 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3934 for it within OBJ, a tree that is an object or a chain of references. */
3935
3936 tree
3937 substitute_placeholder_in_expr (tree exp, tree obj)
3938 {
3939 enum tree_code code = TREE_CODE (exp);
3940 tree op0, op1, op2, op3;
3941 tree new_tree;
3942
3943 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3944 in the chain of OBJ. */
3945 if (code == PLACEHOLDER_EXPR)
3946 {
3947 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3948 tree elt;
3949
3950 for (elt = obj; elt != 0;
3951 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3952 || TREE_CODE (elt) == COND_EXPR)
3953 ? TREE_OPERAND (elt, 1)
3954 : (REFERENCE_CLASS_P (elt)
3955 || UNARY_CLASS_P (elt)
3956 || BINARY_CLASS_P (elt)
3957 || VL_EXP_CLASS_P (elt)
3958 || EXPRESSION_CLASS_P (elt))
3959 ? TREE_OPERAND (elt, 0) : 0))
3960 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3961 return elt;
3962
3963 for (elt = obj; elt != 0;
3964 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3965 || TREE_CODE (elt) == COND_EXPR)
3966 ? TREE_OPERAND (elt, 1)
3967 : (REFERENCE_CLASS_P (elt)
3968 || UNARY_CLASS_P (elt)
3969 || BINARY_CLASS_P (elt)
3970 || VL_EXP_CLASS_P (elt)
3971 || EXPRESSION_CLASS_P (elt))
3972 ? TREE_OPERAND (elt, 0) : 0))
3973 if (POINTER_TYPE_P (TREE_TYPE (elt))
3974 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3975 == need_type))
3976 return fold_build1 (INDIRECT_REF, need_type, elt);
3977
3978 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3979 survives until RTL generation, there will be an error. */
3980 return exp;
3981 }
3982
3983 /* TREE_LIST is special because we need to look at TREE_VALUE
3984 and TREE_CHAIN, not TREE_OPERANDS. */
3985 else if (code == TREE_LIST)
3986 {
3987 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3988 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3989 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3990 return exp;
3991
3992 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3993 }
3994 else
3995 switch (TREE_CODE_CLASS (code))
3996 {
3997 case tcc_constant:
3998 case tcc_declaration:
3999 return exp;
4000
4001 case tcc_exceptional:
4002 case tcc_unary:
4003 case tcc_binary:
4004 case tcc_comparison:
4005 case tcc_expression:
4006 case tcc_reference:
4007 case tcc_statement:
4008 switch (TREE_CODE_LENGTH (code))
4009 {
4010 case 0:
4011 return exp;
4012
4013 case 1:
4014 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4015 if (op0 == TREE_OPERAND (exp, 0))
4016 return exp;
4017
4018 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4019 break;
4020
4021 case 2:
4022 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4023 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4024
4025 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4026 return exp;
4027
4028 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4029 break;
4030
4031 case 3:
4032 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4033 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4034 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4035
4036 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4037 && op2 == TREE_OPERAND (exp, 2))
4038 return exp;
4039
4040 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4041 break;
4042
4043 case 4:
4044 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4045 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4046 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4047 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4048
4049 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4050 && op2 == TREE_OPERAND (exp, 2)
4051 && op3 == TREE_OPERAND (exp, 3))
4052 return exp;
4053
4054 new_tree
4055 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4056 break;
4057
4058 default:
4059 gcc_unreachable ();
4060 }
4061 break;
4062
4063 case tcc_vl_exp:
4064 {
4065 int i;
4066
4067 new_tree = NULL_TREE;
4068
4069 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4070 {
4071 tree op = TREE_OPERAND (exp, i);
4072 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4073 if (new_op != op)
4074 {
4075 if (!new_tree)
4076 new_tree = copy_node (exp);
4077 TREE_OPERAND (new_tree, i) = new_op;
4078 }
4079 }
4080
4081 if (new_tree)
4082 {
4083 new_tree = fold (new_tree);
4084 if (TREE_CODE (new_tree) == CALL_EXPR)
4085 process_call_operands (new_tree);
4086 }
4087 else
4088 return exp;
4089 }
4090 break;
4091
4092 default:
4093 gcc_unreachable ();
4094 }
4095
4096 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4097
4098 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4099 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4100
4101 return new_tree;
4102 }
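/* Illustrative sketch: a typical use of the substitution above is to
   instantiate a self-referential size expression for a particular object.
   The names SELF_REF_TYPE and OBJ below are hypothetical placeholders.  */
#if 0
static tree
example_substitute_size (tree self_ref_type, tree obj)
{
  /* TYPE_SIZE of SELF_REF_TYPE may contain a PLACEHOLDER_EXPR; replace it
     with a reference to OBJ so the size can actually be evaluated.  The
     macro form short-circuits when the expression is constant or null.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (self_ref_type), obj);
}
#endif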
4103 \f
4104
4105 /* Subroutine of stabilize_reference; this is called for subtrees of
4106 references. Any expression with side-effects must be put in a SAVE_EXPR
4107 to ensure that it is only evaluated once.
4108
4109 We don't put SAVE_EXPR nodes around everything, because assigning very
4110 simple expressions to temporaries causes us to miss good opportunities
4111 for optimizations. Among other things, the opportunity to fold in the
4112 addition of a constant into an addressing mode often gets lost, e.g.
4113 "y[i+1] += x;". In general, we take the approach that we should not make
4114 an assignment unless we are forced into it - i.e., that any non-side effect
4115 operator should be allowed, and that cse should take care of coalescing
4116 multiple utterances of the same expression should that prove fruitful. */
4117
4118 static tree
4119 stabilize_reference_1 (tree e)
4120 {
4121 tree result;
4122 enum tree_code code = TREE_CODE (e);
4123
4124 /* We cannot ignore const expressions because the expression might be a
4125 reference to a const array whose index contains side-effects. But we can
4126 ignore things that are actually constant or that have already been
4127 handled by this function. */
4128
4129 if (tree_invariant_p (e))
4130 return e;
4131
4132 switch (TREE_CODE_CLASS (code))
4133 {
4134 case tcc_exceptional:
4135 case tcc_type:
4136 case tcc_declaration:
4137 case tcc_comparison:
4138 case tcc_statement:
4139 case tcc_expression:
4140 case tcc_reference:
4141 case tcc_vl_exp:
4142 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4143 so that it will only be evaluated once. */
4144 /* The reference (r) and comparison (<) classes could be handled as
4145 below, but it is generally faster to only evaluate them once. */
4146 if (TREE_SIDE_EFFECTS (e))
4147 return save_expr (e);
4148 return e;
4149
4150 case tcc_constant:
4151 /* Constants need no processing. In fact, we should never reach
4152 here. */
4153 return e;
4154
4155 case tcc_binary:
4156 /* Division is slow and tends to be compiled with jumps,
4157 especially the division by powers of 2 that is often
4158 found inside of an array reference. So do it just once. */
4159 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4160 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4161 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4162 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4163 return save_expr (e);
4164 /* Recursively stabilize each operand. */
4165 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4167 break;
4168
4169 case tcc_unary:
4170 /* Recursively stabilize each operand. */
4171 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4172 break;
4173
4174 default:
4175 gcc_unreachable ();
4176 }
4177
4178 TREE_TYPE (result) = TREE_TYPE (e);
4179 TREE_READONLY (result) = TREE_READONLY (e);
4180 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4181 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4182
4183 return result;
4184 }
4185
4186 /* Stabilize a reference so that we can use it any number of times
4187 without causing its operands to be evaluated more than once.
4188 Returns the stabilized reference. This works by means of save_expr,
4189 so see the caveats in the comments about save_expr.
4190
4191 Also allows conversion expressions whose operands are references.
4192 Any other kind of expression is returned unchanged. */
4193
4194 tree
4195 stabilize_reference (tree ref)
4196 {
4197 tree result;
4198 enum tree_code code = TREE_CODE (ref);
4199
4200 switch (code)
4201 {
4202 case VAR_DECL:
4203 case PARM_DECL:
4204 case RESULT_DECL:
4205 /* No action is needed in this case. */
4206 return ref;
4207
4208 CASE_CONVERT:
4209 case FLOAT_EXPR:
4210 case FIX_TRUNC_EXPR:
4211 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4212 break;
4213
4214 case INDIRECT_REF:
4215 result = build_nt (INDIRECT_REF,
4216 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4217 break;
4218
4219 case COMPONENT_REF:
4220 result = build_nt (COMPONENT_REF,
4221 stabilize_reference (TREE_OPERAND (ref, 0)),
4222 TREE_OPERAND (ref, 1), NULL_TREE);
4223 break;
4224
4225 case BIT_FIELD_REF:
4226 result = build_nt (BIT_FIELD_REF,
4227 stabilize_reference (TREE_OPERAND (ref, 0)),
4228 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4229 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4230 break;
4231
4232 case ARRAY_REF:
4233 result = build_nt (ARRAY_REF,
4234 stabilize_reference (TREE_OPERAND (ref, 0)),
4235 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4236 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4237 break;
4238
4239 case ARRAY_RANGE_REF:
4240 result = build_nt (ARRAY_RANGE_REF,
4241 stabilize_reference (TREE_OPERAND (ref, 0)),
4242 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4243 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4244 break;
4245
4246 case COMPOUND_EXPR:
4247 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4248 it wouldn't be ignored. This matters when dealing with
4249 volatiles. */
4250 return stabilize_reference_1 (ref);
4251
4252 /* If arg isn't a kind of lvalue we recognize, make no change.
4253 Caller should recognize the error for an invalid lvalue. */
4254 default:
4255 return ref;
4256
4257 case ERROR_MARK:
4258 return error_mark_node;
4259 }
4260
4261 TREE_TYPE (result) = TREE_TYPE (ref);
4262 TREE_READONLY (result) = TREE_READONLY (ref);
4263 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4264 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4265
4266 return result;
4267 }
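/* Illustrative sketch: stabilizing an ARRAY_REF whose index has side
   effects, so the reference can be reused (e.g. on both sides of a
   read-modify-write) without re-evaluating the index.  ARRAY and IDX are
   hypothetical trees supplied by a front end.  */
#if 0
static tree
example_stabilize_array_ref (tree array, tree idx)
{
  tree elt_type = TREE_TYPE (TREE_TYPE (array));
  /* Operands 2 and 3 (lower bound, element size) are left NULL_TREE so
     the defaults from the array type are used.  */
  tree ref = build4 (ARRAY_REF, elt_type, array, idx, NULL_TREE, NULL_TREE);
  /* stabilize_reference_1 wraps the side-effecting index in a SAVE_EXPR.  */
  return stabilize_reference (ref);
}
#endif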
4268 \f
4269 /* Low-level constructors for expressions. */
4270
4271 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4272 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4273
4274 void
4275 recompute_tree_invariant_for_addr_expr (tree t)
4276 {
4277 tree node;
4278 bool tc = true, se = false;
4279
4280 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4281
4282 /* We started out assuming this address is both invariant and constant, but
4283 does not have side effects. Now go down any handled components and see if
4284 any of them involve offsets that are either non-constant or non-invariant.
4285 Also check for side-effects.
4286
4287 ??? Note that this code makes no attempt to deal with the case where
4288 taking the address of something causes a copy due to misalignment. */
4289
4290 #define UPDATE_FLAGS(NODE) \
4291 do { tree _node = (NODE); \
4292 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4293 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4294
4295 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4296 node = TREE_OPERAND (node, 0))
4297 {
4298 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4299 array reference (probably made temporarily by the G++ front end),
4300 so ignore all the operands. */
4301 if ((TREE_CODE (node) == ARRAY_REF
4302 || TREE_CODE (node) == ARRAY_RANGE_REF)
4303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4304 {
4305 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4306 if (TREE_OPERAND (node, 2))
4307 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4308 if (TREE_OPERAND (node, 3))
4309 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4310 }
4311 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4312 FIELD_DECL, apparently. The G++ front end can put something else
4313 there, at least temporarily. */
4314 else if (TREE_CODE (node) == COMPONENT_REF
4315 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4316 {
4317 if (TREE_OPERAND (node, 2))
4318 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4319 }
4320 }
4321
4322 node = lang_hooks.expr_to_decl (node, &tc, &se);
4323
4324 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4325 the address, since &(*a)->b is a form of addition. If it's a constant, the
4326 address is constant too. If it's a decl, its address is constant if the
4327 decl is static. Everything else is not constant and, furthermore,
4328 taking the address of a volatile variable is not volatile. */
4329 if (TREE_CODE (node) == INDIRECT_REF
4330 || TREE_CODE (node) == MEM_REF)
4331 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4332 else if (CONSTANT_CLASS_P (node))
4333 ;
4334 else if (DECL_P (node))
4335 tc &= (staticp (node) != NULL_TREE);
4336 else
4337 {
4338 tc = false;
4339 se |= TREE_SIDE_EFFECTS (node);
4340 }
4341
4342
4343 TREE_CONSTANT (t) = tc;
4344 TREE_SIDE_EFFECTS (t) = se;
4345 #undef UPDATE_FLAGS
4346 }
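/* Illustrative sketch: build1 already calls the function above when it
   creates an ADDR_EXPR, so an explicit call is only needed after the
   operand of an existing ADDR_EXPR has been rewritten in place.  ADDR and
   NEW_BASE are hypothetical trees.  */
#if 0
static void
example_refresh_addr_flags (tree addr, tree new_base)
{
  gcc_assert (TREE_CODE (addr) == ADDR_EXPR);
  TREE_OPERAND (addr, 0) = new_base;
  /* TREE_CONSTANT and TREE_SIDE_EFFECTS of ADDR are stale now; refresh
     them from the new operand.  */
  recompute_tree_invariant_for_addr_expr (addr);
}
#endif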
4347
4348 /* Build an expression of code CODE, data type TYPE, and operands as
4349 specified. Expressions and reference nodes can be created this way.
4350 Constants, decls, types and misc nodes cannot be.
4351
4352 We define six non-variadic functions, from 0 to 5 arguments. This is
4353 enough for all extant tree codes. */
4354
4355 tree
4356 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4357 {
4358 tree t;
4359
4360 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4361
4362 t = make_node_stat (code PASS_MEM_STAT);
4363 TREE_TYPE (t) = tt;
4364
4365 return t;
4366 }
4367
4368 tree
4369 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4370 {
4371 int length = sizeof (struct tree_exp);
4372 tree t;
4373
4374 record_node_allocation_statistics (code, length);
4375
4376 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4377
4378 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4379
4380 memset (t, 0, sizeof (struct tree_common));
4381
4382 TREE_SET_CODE (t, code);
4383
4384 TREE_TYPE (t) = type;
4385 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4386 TREE_OPERAND (t, 0) = node;
4387 if (node && !TYPE_P (node))
4388 {
4389 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4390 TREE_READONLY (t) = TREE_READONLY (node);
4391 }
4392
4393 if (TREE_CODE_CLASS (code) == tcc_statement)
4394 TREE_SIDE_EFFECTS (t) = 1;
4395 else switch (code)
4396 {
4397 case VA_ARG_EXPR:
4398 /* All of these have side-effects, no matter what their
4399 operands are. */
4400 TREE_SIDE_EFFECTS (t) = 1;
4401 TREE_READONLY (t) = 0;
4402 break;
4403
4404 case INDIRECT_REF:
4405 /* Whether a dereference is readonly has nothing to do with whether
4406 its operand is readonly. */
4407 TREE_READONLY (t) = 0;
4408 break;
4409
4410 case ADDR_EXPR:
4411 if (node)
4412 recompute_tree_invariant_for_addr_expr (t);
4413 break;
4414
4415 default:
4416 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4417 && node && !TYPE_P (node)
4418 && TREE_CONSTANT (node))
4419 TREE_CONSTANT (t) = 1;
4420 if (TREE_CODE_CLASS (code) == tcc_reference
4421 && node && TREE_THIS_VOLATILE (node))
4422 TREE_THIS_VOLATILE (t) = 1;
4423 break;
4424 }
4425
4426 return t;
4427 }
4428
4429 #define PROCESS_ARG(N) \
4430 do { \
4431 TREE_OPERAND (t, N) = arg##N; \
4432 if (arg##N && !TYPE_P (arg##N)) \
4433 { \
4434 if (TREE_SIDE_EFFECTS (arg##N)) \
4435 side_effects = 1; \
4436 if (!TREE_READONLY (arg##N) \
4437 && !CONSTANT_CLASS_P (arg##N)) \
4438 (void) (read_only = 0); \
4439 if (!TREE_CONSTANT (arg##N)) \
4440 (void) (constant = 0); \
4441 } \
4442 } while (0)
4443
4444 tree
4445 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4446 {
4447 bool constant, read_only, side_effects;
4448 tree t;
4449
4450 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4451
4452 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4453 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4454 /* When sizetype precision doesn't match that of pointers
4455 we need to be able to build explicit extensions or truncations
4456 of the offset argument. */
4457 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4458 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4459 && TREE_CODE (arg1) == INTEGER_CST);
4460
4461 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4462 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4463 && ptrofftype_p (TREE_TYPE (arg1)));
4464
4465 t = make_node_stat (code PASS_MEM_STAT);
4466 TREE_TYPE (t) = tt;
4467
4468 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4469 result based on those same flags for the arguments. But if the
4470 arguments aren't really even `tree' expressions, we shouldn't be trying
4471 to do this. */
4472
4473 /* Expressions without side effects may be constant if their
4474 arguments are as well. */
4475 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4476 || TREE_CODE_CLASS (code) == tcc_binary);
4477 read_only = 1;
4478 side_effects = TREE_SIDE_EFFECTS (t);
4479
4480 PROCESS_ARG (0);
4481 PROCESS_ARG (1);
4482
4483 TREE_SIDE_EFFECTS (t) = side_effects;
4484 if (code == MEM_REF)
4485 {
4486 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4487 {
4488 tree o = TREE_OPERAND (arg0, 0);
4489 TREE_READONLY (t) = TREE_READONLY (o);
4490 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4491 }
4492 }
4493 else
4494 {
4495 TREE_READONLY (t) = read_only;
4496 TREE_CONSTANT (t) = constant;
4497 TREE_THIS_VOLATILE (t)
4498 = (TREE_CODE_CLASS (code) == tcc_reference
4499 && arg0 && TREE_THIS_VOLATILE (arg0));
4500 }
4501
4502 return t;
4503 }
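/* Illustrative sketch of the flag propagation done by PROCESS_ARG above:
   an arithmetic node built from two constants ends up TREE_CONSTANT and
   free of side effects.  */
#if 0
static void
example_build2_flags (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  tree sum = build2 (PLUS_EXPR, integer_type_node, one, two);
  gcc_checking_assert (TREE_CONSTANT (sum));
  gcc_checking_assert (!TREE_SIDE_EFFECTS (sum));
}
#endif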
4504
4505
4506 tree
4507 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4508 tree arg2 MEM_STAT_DECL)
4509 {
4510 bool constant, read_only, side_effects;
4511 tree t;
4512
4513 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4514 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4515
4516 t = make_node_stat (code PASS_MEM_STAT);
4517 TREE_TYPE (t) = tt;
4518
4519 read_only = 1;
4520
4521 /* As a special exception, if COND_EXPR has NULL branches, we
4522 assume that it is a gimple statement and always consider
4523 it to have side effects. */
4524 if (code == COND_EXPR
4525 && tt == void_type_node
4526 && arg1 == NULL_TREE
4527 && arg2 == NULL_TREE)
4528 side_effects = true;
4529 else
4530 side_effects = TREE_SIDE_EFFECTS (t);
4531
4532 PROCESS_ARG (0);
4533 PROCESS_ARG (1);
4534 PROCESS_ARG (2);
4535
4536 if (code == COND_EXPR)
4537 TREE_READONLY (t) = read_only;
4538
4539 TREE_SIDE_EFFECTS (t) = side_effects;
4540 TREE_THIS_VOLATILE (t)
4541 = (TREE_CODE_CLASS (code) == tcc_reference
4542 && arg0 && TREE_THIS_VOLATILE (arg0));
4543
4544 return t;
4545 }
4546
4547 tree
4548 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4549 tree arg2, tree arg3 MEM_STAT_DECL)
4550 {
4551 bool constant, read_only, side_effects;
4552 tree t;
4553
4554 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4555
4556 t = make_node_stat (code PASS_MEM_STAT);
4557 TREE_TYPE (t) = tt;
4558
4559 side_effects = TREE_SIDE_EFFECTS (t);
4560
4561 PROCESS_ARG (0);
4562 PROCESS_ARG (1);
4563 PROCESS_ARG (2);
4564 PROCESS_ARG (3);
4565
4566 TREE_SIDE_EFFECTS (t) = side_effects;
4567 TREE_THIS_VOLATILE (t)
4568 = (TREE_CODE_CLASS (code) == tcc_reference
4569 && arg0 && TREE_THIS_VOLATILE (arg0));
4570
4571 return t;
4572 }
4573
4574 tree
4575 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4576 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4577 {
4578 bool constant, read_only, side_effects;
4579 tree t;
4580
4581 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4582
4583 t = make_node_stat (code PASS_MEM_STAT);
4584 TREE_TYPE (t) = tt;
4585
4586 side_effects = TREE_SIDE_EFFECTS (t);
4587
4588 PROCESS_ARG (0);
4589 PROCESS_ARG (1);
4590 PROCESS_ARG (2);
4591 PROCESS_ARG (3);
4592 PROCESS_ARG (4);
4593
4594 TREE_SIDE_EFFECTS (t) = side_effects;
4595 if (code == TARGET_MEM_REF)
4596 {
4597 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4598 {
4599 tree o = TREE_OPERAND (arg0, 0);
4600 TREE_READONLY (t) = TREE_READONLY (o);
4601 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4602 }
4603 }
4604 else
4605 TREE_THIS_VOLATILE (t)
4606 = (TREE_CODE_CLASS (code) == tcc_reference
4607 && arg0 && TREE_THIS_VOLATILE (arg0));
4608
4609 return t;
4610 }
4611
4612 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4613 on the pointer PTR. */
4614
4615 tree
4616 build_simple_mem_ref_loc (location_t loc, tree ptr)
4617 {
4618 HOST_WIDE_INT offset = 0;
4619 tree ptype = TREE_TYPE (ptr);
4620 tree tem;
4621 /* For convenience allow addresses that collapse to a simple base
4622 and offset. */
4623 if (TREE_CODE (ptr) == ADDR_EXPR
4624 && (handled_component_p (TREE_OPERAND (ptr, 0))
4625 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4626 {
4627 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4628 gcc_assert (ptr);
4629 ptr = build_fold_addr_expr (ptr);
4630 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4631 }
4632 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4633 ptr, build_int_cst (ptype, offset));
4634 SET_EXPR_LOCATION (tem, loc);
4635 return tem;
4636 }
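/* Illustrative sketch: dereferencing a pointer decl or SSA name PTR at
   location LOC.  PTR is a hypothetical tree of pointer type.  */
#if 0
static tree
example_deref (location_t loc, tree ptr)
{
  /* The result is a MEM_REF of the pointed-to type with a zero offset,
     equivalent to a plain *PTR.  */
  return build_simple_mem_ref_loc (loc, ptr);
}
#endif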
4637
4638 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4639
4640 offset_int
4641 mem_ref_offset (const_tree t)
4642 {
4643 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4644 }
4645
4646 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4647 offsetted by OFFSET units. */
4648
4649 tree
4650 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4651 {
4652 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4653 build_fold_addr_expr (base),
4654 build_int_cst (ptr_type_node, offset));
4655 tree addr = build1 (ADDR_EXPR, type, ref);
4656 recompute_tree_invariant_for_addr_expr (addr);
4657 return addr;
4658 }
4659
4660 /* Similar to the build<N> functions above, except don't specify the TREE_TYPE
4661 and leave TREE_SIDE_EFFECTS as 0.
4662 It is permissible for arguments to be null,
4663 or even garbage if their values do not matter. */
4664
4665 tree
4666 build_nt (enum tree_code code, ...)
4667 {
4668 tree t;
4669 int length;
4670 int i;
4671 va_list p;
4672
4673 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4674
4675 va_start (p, code);
4676
4677 t = make_node (code);
4678 length = TREE_CODE_LENGTH (code);
4679
4680 for (i = 0; i < length; i++)
4681 TREE_OPERAND (t, i) = va_arg (p, tree);
4682
4683 va_end (p);
4684 return t;
4685 }
4686
4687 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4688 tree vec. */
4689
4690 tree
4691 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4692 {
4693 tree ret, t;
4694 unsigned int ix;
4695
4696 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4697 CALL_EXPR_FN (ret) = fn;
4698 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4699 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4700 CALL_EXPR_ARG (ret, ix) = t;
4701 return ret;
4702 }
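/* Illustrative sketch: creating a "naked" CALL_EXPR from a callee and an
   argument vector, without computing side-effect or constant flags.  FN is
   typically an ADDR_EXPR of a FUNCTION_DECL; FN, ARG0 and ARG1 are
   hypothetical trees.  */
#if 0
static tree
example_build_nt_call (tree fn, tree arg0, tree arg1)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg0);
  vec_safe_push (args, arg1);
  return build_nt_call_vec (fn, args);
}
#endif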
4703 \f
4704 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4705 We do NOT enter this node in any sort of symbol table.
4706
4707 LOC is the location of the decl.
4708
4709 layout_decl is used to set up the decl's storage layout.
4710 Other slots are initialized to 0 or null pointers. */
4711
4712 tree
4713 build_decl_stat (location_t loc, enum tree_code code, tree name,
4714 tree type MEM_STAT_DECL)
4715 {
4716 tree t;
4717
4718 t = make_node_stat (code PASS_MEM_STAT);
4719 DECL_SOURCE_LOCATION (t) = loc;
4720
4721 /* if (type == error_mark_node)
4722 type = integer_type_node; */
4723 /* That is not done, deliberately, so that having error_mark_node
4724 as the type can suppress useless errors in the use of this variable. */
4725
4726 DECL_NAME (t) = name;
4727 TREE_TYPE (t) = type;
4728
4729 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4730 layout_decl (t, 0);
4731
4732 return t;
4733 }
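/* Illustrative sketch: creating an artificial local VAR_DECL.  The name
   "tmp" and the use of integer_type_node are arbitrary, for illustration
   only.  */
#if 0
static tree
example_build_var_decl (location_t loc)
{
  tree var = build_decl (loc, VAR_DECL, get_identifier ("tmp"),
                         integer_type_node);
  DECL_ARTIFICIAL (var) = 1;
  /* build_decl already ran layout_decl for VAR_DECLs, so the decl's
     size and alignment are set at this point.  */
  return var;
}
#endif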
4734
4735 /* Builds and returns a function declaration with NAME and TYPE. */
4736
4737 tree
4738 build_fn_decl (const char *name, tree type)
4739 {
4740 tree id = get_identifier (name);
4741 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4742
4743 DECL_EXTERNAL (decl) = 1;
4744 TREE_PUBLIC (decl) = 1;
4745 DECL_ARTIFICIAL (decl) = 1;
4746 TREE_NOTHROW (decl) = 1;
4747
4748 return decl;
4749 }
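/* Illustrative sketch: declaring an external runtime helper.  The helper
   name and signature are hypothetical.  */
#if 0
static tree
example_declare_helper (void)
{
  tree fntype = build_function_type_list (void_type_node, ptr_type_node,
                                          NULL_TREE);
  /* The resulting FUNCTION_DECL is external, public, artificial and
     nothrow, as set by build_fn_decl above.  */
  return build_fn_decl ("__hypothetical_runtime_helper", fntype);
}
#endif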
4750
4751 vec<tree, va_gc> *all_translation_units;
4752
4753 /* Builds a new translation-unit decl with name NAME, queues it in the
4754 global list of translation-unit decls and returns it. */
4755
4756 tree
4757 build_translation_unit_decl (tree name)
4758 {
4759 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4760 name, NULL_TREE);
4761 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4762 vec_safe_push (all_translation_units, tu);
4763 return tu;
4764 }
4765
4766 \f
4767 /* BLOCK nodes are used to represent the structure of binding contours
4768 and declarations, once those contours have been exited and their contents
4769 compiled. This information is used for outputting debugging info. */
4770
4771 tree
4772 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4773 {
4774 tree block = make_node (BLOCK);
4775
4776 BLOCK_VARS (block) = vars;
4777 BLOCK_SUBBLOCKS (block) = subblocks;
4778 BLOCK_SUPERCONTEXT (block) = supercontext;
4779 BLOCK_CHAIN (block) = chain;
4780 return block;
4781 }
4782
4783 \f
4784 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4785
4786 LOC is the location to use in tree T. */
4787
4788 void
4789 protected_set_expr_location (tree t, location_t loc)
4790 {
4791 if (CAN_HAVE_LOCATION_P (t))
4792 SET_EXPR_LOCATION (t, loc);
4793 }
4794 \f
4795 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4796 is ATTRIBUTE. */
4797
4798 tree
4799 build_decl_attribute_variant (tree ddecl, tree attribute)
4800 {
4801 DECL_ATTRIBUTES (ddecl) = attribute;
4802 return ddecl;
4803 }
4804
4805 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4806 is ATTRIBUTE and its qualifiers are QUALS.
4807
4808 Record such modified types already made so we don't make duplicates. */
4809
4810 tree
4811 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4812 {
4813 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4814 {
4815 inchash::hash hstate;
4816 tree ntype;
4817 int i;
4818 tree t;
4819 enum tree_code code = TREE_CODE (ttype);
4820
4821 /* Building a distinct copy of a tagged type is inappropriate; it
4822 causes breakage in code that expects there to be a one-to-one
4823 relationship between a struct and its fields.
4824 build_duplicate_type is another solution (as used in
4825 handle_transparent_union_attribute), but that doesn't play well
4826 with the stronger C++ type identity model. */
4827 if (TREE_CODE (ttype) == RECORD_TYPE
4828 || TREE_CODE (ttype) == UNION_TYPE
4829 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4830 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4831 {
4832 warning (OPT_Wattributes,
4833 "ignoring attributes applied to %qT after definition",
4834 TYPE_MAIN_VARIANT (ttype));
4835 return build_qualified_type (ttype, quals);
4836 }
4837
4838 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4839 ntype = build_distinct_type_copy (ttype);
4840
4841 TYPE_ATTRIBUTES (ntype) = attribute;
4842
4843 hstate.add_int (code);
4844 if (TREE_TYPE (ntype))
4845 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4846 attribute_hash_list (attribute, hstate);
4847
4848 switch (TREE_CODE (ntype))
4849 {
4850 case FUNCTION_TYPE:
4851 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4852 break;
4853 case ARRAY_TYPE:
4854 if (TYPE_DOMAIN (ntype))
4855 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4856 break;
4857 case INTEGER_TYPE:
4858 t = TYPE_MAX_VALUE (ntype);
4859 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4860 hstate.add_object (TREE_INT_CST_ELT (t, i));
4861 break;
4862 case REAL_TYPE:
4863 case FIXED_POINT_TYPE:
4864 {
4865 unsigned int precision = TYPE_PRECISION (ntype);
4866 hstate.add_object (precision);
4867 }
4868 break;
4869 default:
4870 break;
4871 }
4872
4873 ntype = type_hash_canon (hstate.end(), ntype);
4874
4875 /* If the target-dependent attributes make NTYPE different from
4876 its canonical type, we will need to use structural equality
4877 checks for this type. */
4878 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4879 || !comp_type_attributes (ntype, ttype))
4880 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4881 else if (TYPE_CANONICAL (ntype) == ntype)
4882 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4883
4884 ttype = build_qualified_type (ntype, quals);
4885 }
4886 else if (TYPE_QUALS (ttype) != quals)
4887 ttype = build_qualified_type (ttype, quals);
4888
4889 return ttype;
4890 }
4891
4892 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4893 the same. */
4894
4895 static bool
4896 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4897 {
4898 tree cl1, cl2;
4899 for (cl1 = clauses1, cl2 = clauses2;
4900 cl1 && cl2;
4901 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4902 {
4903 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4904 return false;
4905 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4906 {
4907 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4908 OMP_CLAUSE_DECL (cl2)) != 1)
4909 return false;
4910 }
4911 switch (OMP_CLAUSE_CODE (cl1))
4912 {
4913 case OMP_CLAUSE_ALIGNED:
4914 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4915 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4916 return false;
4917 break;
4918 case OMP_CLAUSE_LINEAR:
4919 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4920 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4921 return false;
4922 break;
4923 case OMP_CLAUSE_SIMDLEN:
4924 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4925 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4926 return false;
4927 default:
4928 break;
4929 }
4930 }
4931 return true;
4932 }
4933
4934 /* Compare two constructor-element-type constants. Return true if the lists
4935 are known to be equal; otherwise return false. */
4936
4937 static bool
4938 simple_cst_list_equal (const_tree l1, const_tree l2)
4939 {
4940 while (l1 != NULL_TREE && l2 != NULL_TREE)
4941 {
4942 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4943 return false;
4944
4945 l1 = TREE_CHAIN (l1);
4946 l2 = TREE_CHAIN (l2);
4947 }
4948
4949 return l1 == l2;
4950 }
4951
4952 /* Compare two identifier nodes representing attributes. Either one may
4953 be in wrapped __ATTR__ form. Return true if they are the same, false
4954 otherwise. */
4955
4956 static bool
4957 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4958 {
4959 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4960 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4961 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4962
4963 /* Identifiers can be compared directly for equality. */
4964 if (attr1 == attr2)
4965 return true;
4966
4967 /* If they are not equal, one may still be in the form
4968 'text' while the other is in the form '__text__'. TODO:
4969 If we were storing attributes in normalized 'text' form, then
4970 this could all go away and we could take full advantage of
4971 the fact that we're comparing identifiers. :-) */
4972 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4973 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4974
4975 if (attr2_len == attr1_len + 4)
4976 {
4977 const char *p = IDENTIFIER_POINTER (attr2);
4978 const char *q = IDENTIFIER_POINTER (attr1);
4979 if (p[0] == '_' && p[1] == '_'
4980 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4981 && strncmp (q, p + 2, attr1_len) == 0)
4982 return true;
4983 }
4984 else if (attr2_len + 4 == attr1_len)
4985 {
4986 const char *p = IDENTIFIER_POINTER (attr2);
4987 const char *q = IDENTIFIER_POINTER (attr1);
4988 if (q[0] == '_' && q[1] == '_'
4989 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4990 && strncmp (q + 2, p, attr2_len) == 0)
4991 return true;
4992 }
4993
4994 return false;
4995 }
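/* Illustrative sketch: both spellings of the same attribute name compare
   equal under the function above.  */
#if 0
static void
example_cmp_attrib_spellings (void)
{
  tree plain = get_identifier ("format");
  tree wrapped = get_identifier ("__format__");
  gcc_checking_assert (cmp_attrib_identifiers (plain, wrapped));
  gcc_checking_assert (cmp_attrib_identifiers (plain, plain));
}
#endif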
4996
4997 /* Compare two attributes for their value identity. Return true if the
4998 attribute values are known to be equal; otherwise return false. */
4999
5000 bool
5001 attribute_value_equal (const_tree attr1, const_tree attr2)
5002 {
5003 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5004 return true;
5005
5006 if (TREE_VALUE (attr1) != NULL_TREE
5007 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5008 && TREE_VALUE (attr2) != NULL_TREE
5009 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5010 {
5011 /* Handle attribute format. */
5012 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
5013 {
5014 attr1 = TREE_VALUE (attr1);
5015 attr2 = TREE_VALUE (attr2);
5016 /* Compare the archetypes (printf/scanf/strftime/...). */
5017 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5018 TREE_VALUE (attr2)))
5019 return false;
5020 /* Archetypes are the same. Compare the rest. */
5021 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5022 TREE_CHAIN (attr2)) == 1);
5023 }
5024 return (simple_cst_list_equal (TREE_VALUE (attr1),
5025 TREE_VALUE (attr2)) == 1);
5026 }
5027
5028 if ((flag_openmp || flag_openmp_simd)
5029 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5030 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5031 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5032 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5033 TREE_VALUE (attr2));
5034
5035 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5036 }
5037
5038 /* Return 0 if the attributes for two types are incompatible, 1 if they
5039 are compatible, and 2 if they are nearly compatible (which causes a
5040 warning to be generated). */
5041 int
5042 comp_type_attributes (const_tree type1, const_tree type2)
5043 {
5044 const_tree a1 = TYPE_ATTRIBUTES (type1);
5045 const_tree a2 = TYPE_ATTRIBUTES (type2);
5046 const_tree a;
5047
5048 if (a1 == a2)
5049 return 1;
5050 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5051 {
5052 const struct attribute_spec *as;
5053 const_tree attr;
5054
5055 as = lookup_attribute_spec (get_attribute_name (a));
5056 if (!as || as->affects_type_identity == false)
5057 continue;
5058
5059 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5060 if (!attr || !attribute_value_equal (a, attr))
5061 break;
5062 }
5063 if (!a)
5064 {
5065 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5066 {
5067 const struct attribute_spec *as;
5068
5069 as = lookup_attribute_spec (get_attribute_name (a));
5070 if (!as || as->affects_type_identity == false)
5071 continue;
5072
5073 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5074 break;
5075 /* We don't need to compare trees again, as we did this
5076 already in first loop. */
5077 }
5078 /* All types - affecting identity - are equal, so
5079 there is no need to call target hook for comparison. */
5080 if (!a)
5081 return 1;
5082 }
5083 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5084 return 0;
5085 /* As some type combinations - like default calling-convention - might
5086 be compatible, we have to call the target hook to get the final result. */
5087 return targetm.comp_type_attributes (type1, type2);
5088 }
5089
5090 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
5091 is ATTRIBUTE.
5092
5093 Record such modified types already made so we don't make duplicates. */
5094
5095 tree
5096 build_type_attribute_variant (tree ttype, tree attribute)
5097 {
5098 return build_type_attribute_qual_variant (ttype, attribute,
5099 TYPE_QUALS (ttype));
5100 }
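/* Illustrative sketch: attaching an attribute list to a type.  The list
   layout is TREE_PURPOSE = attribute name, TREE_VALUE = argument list.
   The attribute and argument chosen here are only for illustration; in
   practice the attribute handlers (see attribs.c) drive this.  */
#if 0
static tree
example_attribute_variant (tree type)
{
  tree arg = build_tree_list (NULL_TREE,
                              build_int_cst (integer_type_node, 16));
  tree attr = tree_cons (get_identifier ("aligned"), arg, NULL_TREE);
  return build_type_attribute_variant (type, attr);
}
#endif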
5101
5102
5103 /* Reset the expression *EXPR_P, a size or position.
5104
5105 ??? We could reset all non-constant sizes or positions. But it's cheap
5106 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5107
5108 We need to reset self-referential sizes or positions because they cannot
5109 be gimplified and thus can contain a CALL_EXPR after the gimplification
5110 is finished, which will run afoul of LTO streaming. And they need to be
5111 reset to something essentially dummy but not constant, so as to preserve
5112 the properties of the object they are attached to. */
5113
5114 static inline void
5115 free_lang_data_in_one_sizepos (tree *expr_p)
5116 {
5117 tree expr = *expr_p;
5118 if (CONTAINS_PLACEHOLDER_P (expr))
5119 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5120 }
5121
5122
5123 /* Reset all the fields in a binfo node BINFO. We only keep
5124 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5125
5126 static void
5127 free_lang_data_in_binfo (tree binfo)
5128 {
5129 unsigned i;
5130 tree t;
5131
5132 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5133
5134 BINFO_VIRTUALS (binfo) = NULL_TREE;
5135 BINFO_BASE_ACCESSES (binfo) = NULL;
5136 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5137 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5138
5139 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5140 free_lang_data_in_binfo (t);
5141 }
5142
5143
5144 /* Reset all language specific information still present in TYPE. */
5145
5146 static void
5147 free_lang_data_in_type (tree type)
5148 {
5149 gcc_assert (TYPE_P (type));
5150
5151 /* Give the FE a chance to remove its own data first. */
5152 lang_hooks.free_lang_data (type);
5153
5154 TREE_LANG_FLAG_0 (type) = 0;
5155 TREE_LANG_FLAG_1 (type) = 0;
5156 TREE_LANG_FLAG_2 (type) = 0;
5157 TREE_LANG_FLAG_3 (type) = 0;
5158 TREE_LANG_FLAG_4 (type) = 0;
5159 TREE_LANG_FLAG_5 (type) = 0;
5160 TREE_LANG_FLAG_6 (type) = 0;
5161
5162 if (TREE_CODE (type) == FUNCTION_TYPE)
5163 {
5164 /* Remove the const and volatile qualifiers from arguments. The
5165 C++ front end removes them, but the C front end does not,
5166 leading to false ODR violation errors when merging two
5167 instances of the same function signature compiled by
5168 different front ends. */
5169 tree p;
5170
5171 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5172 {
5173 tree arg_type = TREE_VALUE (p);
5174
5175 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5176 {
5177 int quals = TYPE_QUALS (arg_type)
5178 & ~TYPE_QUAL_CONST
5179 & ~TYPE_QUAL_VOLATILE;
5180 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5181 free_lang_data_in_type (TREE_VALUE (p));
5182 }
5183 /* C++ FE uses TREE_PURPOSE to store initial values. */
5184 TREE_PURPOSE (p) = NULL;
5185 }
5186 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5187 TYPE_MINVAL (type) = NULL;
5188 }
5189 if (TREE_CODE (type) == METHOD_TYPE)
5190 {
5191 tree p;
5192
5193 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5194 {
5195 /* C++ FE uses TREE_PURPOSE to store initial values. */
5196 TREE_PURPOSE (p) = NULL;
5197 }
5198 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5199 TYPE_MINVAL (type) = NULL;
5200 }
5201
5202 /* Remove members that are not actually FIELD_DECLs from the field
5203 list of an aggregate. These occur in C++. */
5204 if (RECORD_OR_UNION_TYPE_P (type))
5205 {
5206 tree prev, member;
5207
5208 /* Note that TYPE_FIELDS can be shared across distinct
5209 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5210 to be removed, we cannot set its TREE_CHAIN to NULL.
5211 Otherwise, we would not be able to find all the other fields
5212 in the other instances of this TREE_TYPE.
5213
5214 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5215 prev = NULL_TREE;
5216 member = TYPE_FIELDS (type);
5217 while (member)
5218 {
5219 if (TREE_CODE (member) == FIELD_DECL
5220 || (TREE_CODE (member) == TYPE_DECL
5221 && !DECL_IGNORED_P (member)
5222 && debug_info_level > DINFO_LEVEL_TERSE
5223 && !is_redundant_typedef (member)))
5224 {
5225 if (prev)
5226 TREE_CHAIN (prev) = member;
5227 else
5228 TYPE_FIELDS (type) = member;
5229 prev = member;
5230 }
5231
5232 member = TREE_CHAIN (member);
5233 }
5234
5235 if (prev)
5236 TREE_CHAIN (prev) = NULL_TREE;
5237 else
5238 TYPE_FIELDS (type) = NULL_TREE;
5239
5240 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5241 and dangles the pointer from time to time. */
5242 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5243 TYPE_VFIELD (type) = NULL_TREE;
5244
5245 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5246 to enable ODR warnings about differing method lists, doing so
5247 seems to increase the size of the streamed LTO data impractically.
5248 Keep a record of whether TYPE_METHODS was non-NULL; this is used
5249 by function.c and the pretty printers. */
5250 if (TYPE_METHODS (type))
5251 TYPE_METHODS (type) = error_mark_node;
5252 if (TYPE_BINFO (type))
5253 {
5254 free_lang_data_in_binfo (TYPE_BINFO (type));
5255 /* We need to preserve the link to bases and the virtual table for all
5256 polymorphic types to keep the devirtualization machinery working.
5257 Debug output cares only about bases, but we also output the
5258 virtual table pointers so that merging -fdevirtualize and
5259 -fno-devirtualize units is easier. */
5260 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5261 || !flag_devirtualize)
5262 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5263 && !BINFO_VTABLE (TYPE_BINFO (type)))
5264 || debug_info_level != DINFO_LEVEL_NONE))
5265 TYPE_BINFO (type) = NULL;
5266 }
5267 }
5268 else
5269 {
5270 /* For non-aggregate types, clear out the language slot (which
5271 overloads TYPE_BINFO). */
5272 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5273
5274 if (INTEGRAL_TYPE_P (type)
5275 || SCALAR_FLOAT_TYPE_P (type)
5276 || FIXED_POINT_TYPE_P (type))
5277 {
5278 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5279 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5280 }
5281 }
5282
5283 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5284 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5285
5286 if (TYPE_CONTEXT (type)
5287 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5288 {
5289 tree ctx = TYPE_CONTEXT (type);
5290 do
5291 {
5292 ctx = BLOCK_SUPERCONTEXT (ctx);
5293 }
5294 while (ctx && TREE_CODE (ctx) == BLOCK);
5295 TYPE_CONTEXT (type) = ctx;
5296 }
5297 }
5298
5299
5300 /* Return true if DECL may need an assembler name to be set. */
5301
5302 static inline bool
5303 need_assembler_name_p (tree decl)
5304 {
5305 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5306 Rule merging. This makes type_odr_p return true on those types during
5307 LTO, and by comparing the mangled names we can tell which types are
5308 intended to be equivalent across compilation units.
5309
5310 We do not store names of type_in_anonymous_namespace_p types.
5311
5312 Record, union and enumeration types have linkage that allows us
5313 to check type_in_anonymous_namespace_p. We do not mangle compound types
5314 that can always be compared structurally.
5315
5316 Similarly for builtin types, we compare properties of their main variant.
5317 A special case is integer types, where mangling does distinguish
5318 char/signed char/unsigned char etc. Storing names for these allows
5319 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5320 See cp/mangle.c:write_builtin_type for details. */
5321
5322 if (flag_lto_odr_type_mering
5323 && TREE_CODE (decl) == TYPE_DECL
5324 && DECL_NAME (decl)
5325 && decl == TYPE_NAME (TREE_TYPE (decl))
5326 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5327 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5328 && (type_with_linkage_p (TREE_TYPE (decl))
5329 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5330 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5331 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5332 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5333 if (TREE_CODE (decl) != FUNCTION_DECL
5334 && TREE_CODE (decl) != VAR_DECL)
5335 return false;
5336
5337 /* If DECL already has its assembler name set, it does not need a
5338 new one. */
5339 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5340 || DECL_ASSEMBLER_NAME_SET_P (decl))
5341 return false;
5342
5343 /* Abstract decls do not need an assembler name. */
5344 if (DECL_ABSTRACT_P (decl))
5345 return false;
5346
5347 /* For VAR_DECLs, only static, public and external symbols need an
5348 assembler name. */
5349 if (TREE_CODE (decl) == VAR_DECL
5350 && !TREE_STATIC (decl)
5351 && !TREE_PUBLIC (decl)
5352 && !DECL_EXTERNAL (decl))
5353 return false;
5354
5355 if (TREE_CODE (decl) == FUNCTION_DECL)
5356 {
5357 /* Do not set assembler name on builtins. Allow RTL expansion to
5358 decide whether to expand inline or via a regular call. */
5359 if (DECL_BUILT_IN (decl)
5360 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5361 return false;
5362
5363 /* Functions represented in the callgraph need an assembler name. */
5364 if (cgraph_node::get (decl) != NULL)
5365 return true;
5366
5367 /* Unused and not public functions don't need an assembler name. */
5368 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5369 return false;
5370 }
5371
5372 return true;
5373 }
5374
5375
5376 /* Reset all language specific information still present in symbol
5377 DECL. */
5378
5379 static void
5380 free_lang_data_in_decl (tree decl)
5381 {
5382 gcc_assert (DECL_P (decl));
5383
5384 /* Give the FE a chance to remove its own data first. */
5385 lang_hooks.free_lang_data (decl);
5386
5387 TREE_LANG_FLAG_0 (decl) = 0;
5388 TREE_LANG_FLAG_1 (decl) = 0;
5389 TREE_LANG_FLAG_2 (decl) = 0;
5390 TREE_LANG_FLAG_3 (decl) = 0;
5391 TREE_LANG_FLAG_4 (decl) = 0;
5392 TREE_LANG_FLAG_5 (decl) = 0;
5393 TREE_LANG_FLAG_6 (decl) = 0;
5394
5395 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5396 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5397 if (TREE_CODE (decl) == FIELD_DECL)
5398 {
5399 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5400 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5401 DECL_QUALIFIER (decl) = NULL_TREE;
5402 }
5403
5404 if (TREE_CODE (decl) == FUNCTION_DECL)
5405 {
5406 struct cgraph_node *node;
5407 if (!(node = cgraph_node::get (decl))
5408 || (!node->definition && !node->clones))
5409 {
5410 if (node)
5411 node->release_body ();
5412 else
5413 {
5414 release_function_body (decl);
5415 DECL_ARGUMENTS (decl) = NULL;
5416 DECL_RESULT (decl) = NULL;
5417 DECL_INITIAL (decl) = error_mark_node;
5418 }
5419 }
5420 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5421 {
5422 tree t;
5423
5424 /* If DECL has a gimple body, then the context for its
5425 arguments must be DECL. Otherwise, it doesn't really
5426 matter, as we will not be emitting any code for DECL. In
5427 general, there may be other instances of DECL created by
5428 the front end and since PARM_DECLs are generally shared,
5429 their DECL_CONTEXT changes as the replicas of DECL are
5430 created. The only time where DECL_CONTEXT is important
5431 is for the FUNCTION_DECLs that have a gimple body (since
5432 the PARM_DECL will be used in the function's body). */
5433 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5434 DECL_CONTEXT (t) = decl;
5435 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5436 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5437 = target_option_default_node;
5438 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5439 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5440 = optimization_default_node;
5441 }
5442
5443 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5444 At this point, it is not needed anymore. */
5445 DECL_SAVED_TREE (decl) = NULL_TREE;
5446
5447 /* Clear the abstract origin if it refers to a method. Otherwise
5448 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5449 origin will not be output correctly. */
5450 if (DECL_ABSTRACT_ORIGIN (decl)
5451 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5452 && RECORD_OR_UNION_TYPE_P
5453 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5454 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5455
5456 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5457 DECL_VINDEX referring to itself into a vtable slot number as it
5458 should. Happens with functions that are copied and then forgotten
5459 about. Just clear it, it won't matter anymore. */
5460 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5461 DECL_VINDEX (decl) = NULL_TREE;
5462 }
5463 else if (TREE_CODE (decl) == VAR_DECL)
5464 {
5465 if ((DECL_EXTERNAL (decl)
5466 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5467 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5468 DECL_INITIAL (decl) = NULL_TREE;
5469 }
5470 else if (TREE_CODE (decl) == TYPE_DECL)
5471 {
5472 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5473 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5474 DECL_INITIAL (decl) = NULL_TREE;
5475 }
5476 else if (TREE_CODE (decl) == FIELD_DECL)
5477 DECL_INITIAL (decl) = NULL_TREE;
5478 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5479 && DECL_INITIAL (decl)
5480 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5481 {
5482 /* Strip builtins from the translation-unit BLOCK. We still have targets
5483 without builtin_decl_explicit support and also builtins are shared
5484 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5485 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5486 while (*nextp)
5487 {
5488 tree var = *nextp;
5489 if (TREE_CODE (var) == FUNCTION_DECL
5490 && DECL_BUILT_IN (var))
5491 *nextp = TREE_CHAIN (var);
5492 else
5493 nextp = &TREE_CHAIN (var);
5494 }
5495 }
5496 }
5497
5498
5499 /* Data used when collecting DECLs and TYPEs for language data removal. */
5500
5501 struct free_lang_data_d
5502 {
5503 /* Worklist to avoid excessive recursion. */
5504 vec<tree> worklist;
5505
5506 /* Set of traversed objects. Used to avoid duplicate visits. */
5507 hash_set<tree> *pset;
5508
5509 /* Array of symbols to process with free_lang_data_in_decl. */
5510 vec<tree> decls;
5511
5512 /* Array of types to process with free_lang_data_in_type. */
5513 vec<tree> types;
5514 };
5515
5516
5517 /* Save all language fields needed to generate proper debug information
5518 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5519
5520 static void
5521 save_debug_info_for_decl (tree t)
5522 {
5523 /*struct saved_debug_info_d *sdi;*/
5524
5525 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5526
5527 /* FIXME. Partial implementation for saving debug info removed. */
5528 }
5529
5530
5531 /* Save all language fields needed to generate proper debug information
5532 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5533
5534 static void
5535 save_debug_info_for_type (tree t)
5536 {
5537 /*struct saved_debug_info_d *sdi;*/
5538
5539 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5540
5541 /* FIXME. Partial implementation for saving debug info removed. */
5542 }
5543
5544
5545 /* Add type or decl T to one of the list of tree nodes that need their
5546 language data removed. The lists are held inside FLD. */
5547
5548 static void
5549 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5550 {
5551 if (DECL_P (t))
5552 {
5553 fld->decls.safe_push (t);
5554 if (debug_info_level > DINFO_LEVEL_TERSE)
5555 save_debug_info_for_decl (t);
5556 }
5557 else if (TYPE_P (t))
5558 {
5559 fld->types.safe_push (t);
5560 if (debug_info_level > DINFO_LEVEL_TERSE)
5561 save_debug_info_for_type (t);
5562 }
5563 else
5564 gcc_unreachable ();
5565 }
5566
5567 /* Push tree node T into FLD->WORKLIST. */
5568
5569 static inline void
5570 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5571 {
5572 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5573 fld->worklist.safe_push ((t));
5574 }
5575
5576
5577 /* Operand callback helper for free_lang_data_in_node. *TP is the
5578 subtree operand being considered. */
5579
5580 static tree
5581 find_decls_types_r (tree *tp, int *ws, void *data)
5582 {
5583 tree t = *tp;
5584 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5585
5586 if (TREE_CODE (t) == TREE_LIST)
5587 return NULL_TREE;
5588
5589 /* Language specific nodes will be removed, so there is no need
5590 to gather anything under them. */
5591 if (is_lang_specific (t))
5592 {
5593 *ws = 0;
5594 return NULL_TREE;
5595 }
5596
5597 if (DECL_P (t))
5598 {
5599 /* Note that walk_tree does not traverse every possible field in
5600 decls, so we have to do our own traversals here. */
5601 add_tree_to_fld_list (t, fld);
5602
5603 fld_worklist_push (DECL_NAME (t), fld);
5604 fld_worklist_push (DECL_CONTEXT (t), fld);
5605 fld_worklist_push (DECL_SIZE (t), fld);
5606 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5607
5608 /* We are going to remove everything under DECL_INITIAL for
5609 TYPE_DECLs. No point walking them. */
5610 if (TREE_CODE (t) != TYPE_DECL)
5611 fld_worklist_push (DECL_INITIAL (t), fld);
5612
5613 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5614 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5615
5616 if (TREE_CODE (t) == FUNCTION_DECL)
5617 {
5618 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5619 fld_worklist_push (DECL_RESULT (t), fld);
5620 }
5621 else if (TREE_CODE (t) == TYPE_DECL)
5622 {
5623 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5624 }
5625 else if (TREE_CODE (t) == FIELD_DECL)
5626 {
5627 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5628 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5629 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5630 fld_worklist_push (DECL_FCONTEXT (t), fld);
5631 }
5632
5633 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5634 && DECL_HAS_VALUE_EXPR_P (t))
5635 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5636
5637 if (TREE_CODE (t) != FIELD_DECL
5638 && TREE_CODE (t) != TYPE_DECL)
5639 fld_worklist_push (TREE_CHAIN (t), fld);
5640 *ws = 0;
5641 }
5642 else if (TYPE_P (t))
5643 {
5644 /* Note that walk_tree does not traverse every possible field in
5645 types, so we have to do our own traversals here. */
5646 add_tree_to_fld_list (t, fld);
5647
5648 if (!RECORD_OR_UNION_TYPE_P (t))
5649 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5650 fld_worklist_push (TYPE_SIZE (t), fld);
5651 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5652 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5653 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5654 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5655 fld_worklist_push (TYPE_NAME (t), fld);
5656 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5657 them and thus do not want to reach unused pointer types
5658 this way. */
5659 if (!POINTER_TYPE_P (t))
5660 fld_worklist_push (TYPE_MINVAL (t), fld);
5661 if (!RECORD_OR_UNION_TYPE_P (t))
5662 fld_worklist_push (TYPE_MAXVAL (t), fld);
5663 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5664 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5665 do not want to reach unused variants this way. */
5666 if (TYPE_CONTEXT (t))
5667 {
5668 tree ctx = TYPE_CONTEXT (t);
5669 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5670 So push that instead. */
5671 while (ctx && TREE_CODE (ctx) == BLOCK)
5672 ctx = BLOCK_SUPERCONTEXT (ctx);
5673 fld_worklist_push (ctx, fld);
5674 }
5675 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5676 want to reach unused types this way. */
5677
5678 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5679 {
5680 unsigned i;
5681 tree tem;
5682 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5683 fld_worklist_push (TREE_TYPE (tem), fld);
5684 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5685 if (tem
5686 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5687 && TREE_CODE (tem) == TREE_LIST)
5688 do
5689 {
5690 fld_worklist_push (TREE_VALUE (tem), fld);
5691 tem = TREE_CHAIN (tem);
5692 }
5693 while (tem);
5694 }
5695 if (RECORD_OR_UNION_TYPE_P (t))
5696 {
5697 tree tem;
5698 /* Push all TYPE_FIELDS - there can be interleaving interesting
5699 and non-interesting things. */
5700 tem = TYPE_FIELDS (t);
5701 while (tem)
5702 {
5703 if (TREE_CODE (tem) == FIELD_DECL
5704 || (TREE_CODE (tem) == TYPE_DECL
5705 && !DECL_IGNORED_P (tem)
5706 && debug_info_level > DINFO_LEVEL_TERSE
5707 && !is_redundant_typedef (tem)))
5708 fld_worklist_push (tem, fld);
5709 tem = TREE_CHAIN (tem);
5710 }
5711 }
5712
5713 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5714 *ws = 0;
5715 }
5716 else if (TREE_CODE (t) == BLOCK)
5717 {
5718 tree tem;
5719 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5720 fld_worklist_push (tem, fld);
5721 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5722 fld_worklist_push (tem, fld);
5723 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5724 }
5725
5726 if (TREE_CODE (t) != IDENTIFIER_NODE
5727 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5728 fld_worklist_push (TREE_TYPE (t), fld);
5729
5730 return NULL_TREE;
5731 }
5732
5733
5734 /* Find decls and types in T. */
5735
5736 static void
5737 find_decls_types (tree t, struct free_lang_data_d *fld)
5738 {
5739 while (1)
5740 {
5741 if (!fld->pset->contains (t))
5742 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5743 if (fld->worklist.is_empty ())
5744 break;
5745 t = fld->worklist.pop ();
5746 }
5747 }
5748
5749 /* Translate all the types in LIST with the corresponding runtime
5750 types. */
5751
5752 static tree
5753 get_eh_types_for_runtime (tree list)
5754 {
5755 tree head, prev;
5756
5757 if (list == NULL_TREE)
5758 return NULL_TREE;
5759
5760 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5761 prev = head;
5762 list = TREE_CHAIN (list);
5763 while (list)
5764 {
5765 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5766 TREE_CHAIN (prev) = n;
5767 prev = TREE_CHAIN (prev);
5768 list = TREE_CHAIN (list);
5769 }
5770
5771 return head;
5772 }
5773
5774
5775 /* Find decls and types referenced in EH region R and store them in
5776 FLD->DECLS and FLD->TYPES. */
5777
5778 static void
5779 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5780 {
5781 switch (r->type)
5782 {
5783 case ERT_CLEANUP:
5784 break;
5785
5786 case ERT_TRY:
5787 {
5788 eh_catch c;
5789
5790 /* The types referenced in each catch must first be changed to the
5791 EH types used at runtime. This removes references to FE types
5792 in the region. */
5793 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5794 {
5795 c->type_list = get_eh_types_for_runtime (c->type_list);
5796 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5797 }
5798 }
5799 break;
5800
5801 case ERT_ALLOWED_EXCEPTIONS:
5802 r->u.allowed.type_list
5803 = get_eh_types_for_runtime (r->u.allowed.type_list);
5804 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5805 break;
5806
5807 case ERT_MUST_NOT_THROW:
5808 walk_tree (&r->u.must_not_throw.failure_decl,
5809 find_decls_types_r, fld, fld->pset);
5810 break;
5811 }
5812 }
5813
5814
5815 /* Find decls and types referenced in cgraph node N and store them in
5816 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5817 look for *every* kind of DECL and TYPE node reachable from N,
5818 including those embedded inside types and decls (e.g., TYPE_DECLs,
5819 NAMESPACE_DECLs, etc). */
5820
5821 static void
5822 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5823 {
5824 basic_block bb;
5825 struct function *fn;
5826 unsigned ix;
5827 tree t;
5828
5829 find_decls_types (n->decl, fld);
5830
5831 if (!gimple_has_body_p (n->decl))
5832 return;
5833
5834 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5835
5836 fn = DECL_STRUCT_FUNCTION (n->decl);
5837
5838 /* Traverse locals. */
5839 FOR_EACH_LOCAL_DECL (fn, ix, t)
5840 find_decls_types (t, fld);
5841
5842 /* Traverse EH regions in FN. */
5843 {
5844 eh_region r;
5845 FOR_ALL_EH_REGION_FN (r, fn)
5846 find_decls_types_in_eh_region (r, fld);
5847 }
5848
5849 /* Traverse every statement in FN. */
5850 FOR_EACH_BB_FN (bb, fn)
5851 {
5852 gphi_iterator psi;
5853 gimple_stmt_iterator si;
5854 unsigned i;
5855
5856 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5857 {
5858 gphi *phi = psi.phi ();
5859
5860 for (i = 0; i < gimple_phi_num_args (phi); i++)
5861 {
5862 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5863 find_decls_types (*arg_p, fld);
5864 }
5865 }
5866
5867 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5868 {
5869 gimple *stmt = gsi_stmt (si);
5870
5871 if (is_gimple_call (stmt))
5872 find_decls_types (gimple_call_fntype (stmt), fld);
5873
5874 for (i = 0; i < gimple_num_ops (stmt); i++)
5875 {
5876 tree arg = gimple_op (stmt, i);
5877 find_decls_types (arg, fld);
5878 }
5879 }
5880 }
5881 }
5882
5883
5884 /* Find decls and types referenced in varpool node N and store them in
5885 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5886 look for *every* kind of DECL and TYPE node reachable from N,
5887 including those embedded inside types and decls (i.e., TYPE_DECLs,
5888 NAMESPACE_DECLs, etc). */
5889
5890 static void
5891 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5892 {
5893 find_decls_types (v->decl, fld);
5894 }
5895
5896 /* If T needs an assembler name, have one created for it. */
5897
5898 void
5899 assign_assembler_name_if_neeeded (tree t)
5900 {
5901 if (need_assembler_name_p (t))
5902 {
5903 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5904 diagnostics that use input_location to show locus
5905 information. The problem here is that, at this point,
5906 input_location is generally anchored to the end of the file
5907 (since the parser is long gone), so we don't have a good
5908 position to pin it to.
5909
5910 To alleviate this problem, this function uses the location of T's
5911 declaration. Examples of this are
5912 testsuite/g++.dg/template/cond2.C and
5913 testsuite/g++.dg/template/pr35240.C. */
5914 location_t saved_location = input_location;
5915 input_location = DECL_SOURCE_LOCATION (t);
5916
5917 decl_assembler_name (t);
5918
5919 input_location = saved_location;
5920 }
5921 }
5922
5923
5924 /* Free language specific information for every operand and expression
5925 in every node of the call graph. This process operates in three stages:
5926
5927 1- Every callgraph node and varpool node is traversed looking for
5928 decls and types embedded in them. This is a more exhaustive
5929 search than that done by find_referenced_vars, because it will
5930 also collect individual fields, decls embedded in types, etc.
5931
5932 2- All the decls found are sent to free_lang_data_in_decl.
5933
5934 3- All the types found are sent to free_lang_data_in_type.
5935
5936 The ordering between decls and types is important because
5937 free_lang_data_in_decl sets assembler names, which includes
5938 mangling. So types cannot be freed up until assembler names have
5939 been set up. */
5940
5941 static void
5942 free_lang_data_in_cgraph (void)
5943 {
5944 struct cgraph_node *n;
5945 varpool_node *v;
5946 struct free_lang_data_d fld;
5947 tree t;
5948 unsigned i;
5949 alias_pair *p;
5950
5951 /* Initialize sets and arrays to store referenced decls and types. */
5952 fld.pset = new hash_set<tree>;
5953 fld.worklist.create (0);
5954 fld.decls.create (100);
5955 fld.types.create (100);
5956
5957 /* Find decls and types in the body of every function in the callgraph. */
5958 FOR_EACH_FUNCTION (n)
5959 find_decls_types_in_node (n, &fld);
5960
5961 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5962 find_decls_types (p->decl, &fld);
5963
5964 /* Find decls and types in every varpool symbol. */
5965 FOR_EACH_VARIABLE (v)
5966 find_decls_types_in_var (v, &fld);
5967
5968 /* Set the assembler name on every decl found. We need to do this
5969 now because free_lang_data_in_decl will invalidate data needed
5970 for mangling. This breaks mangling on interdependent decls. */
5971 FOR_EACH_VEC_ELT (fld.decls, i, t)
5972 assign_assembler_name_if_neeeded (t);
5973
5974 /* Traverse every decl found freeing its language data. */
5975 FOR_EACH_VEC_ELT (fld.decls, i, t)
5976 free_lang_data_in_decl (t);
5977
5978 /* Traverse every type found freeing its language data. */
5979 FOR_EACH_VEC_ELT (fld.types, i, t)
5980 free_lang_data_in_type (t);
5981 if (flag_checking)
5982 {
5983 FOR_EACH_VEC_ELT (fld.types, i, t)
5984 verify_type (t);
5985 }
5986
5987 delete fld.pset;
5988 fld.worklist.release ();
5989 fld.decls.release ();
5990 fld.types.release ();
5991 }
5992
5993
5994 /* Free resources used by the FE that are not needed once it is done. */
5995
5996 static unsigned
5997 free_lang_data (void)
5998 {
5999 unsigned i;
6000
6001 /* If we are the LTO frontend we have freed lang-specific data already. */
6002 if (in_lto_p
6003 || (!flag_generate_lto && !flag_generate_offload))
6004 return 0;
6005
6006 /* Allocate and assign alias sets to the standard integer types
6007 while the slots are still set up the way the front ends generated them. */
6008 for (i = 0; i < itk_none; ++i)
6009 if (integer_types[i])
6010 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6011
6012 /* Traverse the IL resetting language specific information for
6013 operands, expressions, etc. */
6014 free_lang_data_in_cgraph ();
6015
6016 /* Create gimple variants for common types. */
6017 ptrdiff_type_node = integer_type_node;
6018 fileptr_type_node = ptr_type_node;
6019
6020 /* Reset some langhooks. Do not reset types_compatible_p, it may
6021 still be used indirectly via the get_alias_set langhook. */
6022 lang_hooks.dwarf_name = lhd_dwarf_name;
6023 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6024 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6025
6026 /* We do not want the default decl_assembler_name implementation,
6027 rather if we have fixed everything we want a wrapper around it
6028 asserting that all non-local symbols already got their assembler
6029 name and only produce assembler names for local symbols. Or rather
6030 make sure we never call decl_assembler_name on local symbols and
6031 devise a separate, middle-end private scheme for it. */
6032
6033 /* Reset diagnostic machinery. */
6034 tree_diagnostics_defaults (global_dc);
6035
6036 return 0;
6037 }
6038
6039
6040 namespace {
6041
6042 const pass_data pass_data_ipa_free_lang_data =
6043 {
6044 SIMPLE_IPA_PASS, /* type */
6045 "*free_lang_data", /* name */
6046 OPTGROUP_NONE, /* optinfo_flags */
6047 TV_IPA_FREE_LANG_DATA, /* tv_id */
6048 0, /* properties_required */
6049 0, /* properties_provided */
6050 0, /* properties_destroyed */
6051 0, /* todo_flags_start */
6052 0, /* todo_flags_finish */
6053 };
6054
6055 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6056 {
6057 public:
6058 pass_ipa_free_lang_data (gcc::context *ctxt)
6059 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6060 {}
6061
6062 /* opt_pass methods: */
6063 virtual unsigned int execute (function *) { return free_lang_data (); }
6064
6065 }; // class pass_ipa_free_lang_data
6066
6067 } // anon namespace
6068
6069 simple_ipa_opt_pass *
6070 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6071 {
6072 return new pass_ipa_free_lang_data (ctxt);
6073 }
6074
6075 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6076 ATTR_NAME. Also used internally by remove_attribute(). */
6077 bool
6078 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6079 {
6080 size_t ident_len = IDENTIFIER_LENGTH (ident);
6081
6082 if (ident_len == attr_len)
6083 {
6084 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6085 return true;
6086 }
6087 else if (ident_len == attr_len + 4)
6088 {
6089 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6090 '__text__'. */
6091 const char *p = IDENTIFIER_POINTER (ident);
6092 if (p[0] == '_' && p[1] == '_'
6093 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6094 && strncmp (attr_name, p + 2, attr_len) == 0)
6095 return true;
6096 }
6097
6098 return false;
6099 }
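
/* A minimal usage sketch, not part of GCC proper: both the plain and
   the '__'-wrapped spelling of an attribute name are recognized.  The
   helper name below is hypothetical and only illustrates the call.  */

static bool
example_attr_spelling_match (void)
{
  tree plain = get_identifier ("packed");
  tree ugly = get_identifier ("__packed__");

  /* Both calls return true; the second spelling is accepted by the
     "ident_len == attr_len + 4" branch above.  */
  return (private_is_attribute_p ("packed", strlen ("packed"), plain)
	  && private_is_attribute_p ("packed", strlen ("packed"), ugly));
}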
6100
6101 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6102 of ATTR_NAME, and LIST is not NULL_TREE. */
6103 tree
6104 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6105 {
6106 while (list)
6107 {
6108 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6109
6110 if (ident_len == attr_len)
6111 {
6112 if (!strcmp (attr_name,
6113 IDENTIFIER_POINTER (get_attribute_name (list))))
6114 break;
6115 }
6116 /* TODO: If we made sure that attributes were stored in the
6117 canonical form without '__...__' (i.e., as in 'text' as opposed
6118 to '__text__') then we could avoid the following case. */
6119 else if (ident_len == attr_len + 4)
6120 {
6121 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6122 if (p[0] == '_' && p[1] == '_'
6123 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6124 && strncmp (attr_name, p + 2, attr_len) == 0)
6125 break;
6126 }
6127 list = TREE_CHAIN (list);
6128 }
6129
6130 return list;
6131 }
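
/* An illustrative sketch (the helper name is hypothetical):
   lookup_attribute, declared in tree.h, computes ATTR_LEN and
   dispatches to the routine above.  The list is hand-built here
   with tree_cons.  */

static tree
example_lookup_packed (void)
{
  tree attrs = tree_cons (get_identifier ("noreturn"), NULL_TREE,
			  tree_cons (get_identifier ("__packed__"),
				     NULL_TREE, NULL_TREE));

  /* Returns the second TREE_LIST node even though it is spelled
     "__packed__" rather than "packed".  */
  return lookup_attribute ("packed", attrs);
}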
6132
6133 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6134 return the first element of LIST whose attribute name starts with
6135 ATTR_NAME, or NULL_TREE if there is no such element. ATTR_NAME
6136 must be in the form 'text' (not '__text__'). */
6137
6138 tree
6139 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6140 tree list)
6141 {
6142 while (list)
6143 {
6144 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6145
6146 if (attr_len > ident_len)
6147 {
6148 list = TREE_CHAIN (list);
6149 continue;
6150 }
6151
6152 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6153
6154 if (strncmp (attr_name, p, attr_len) == 0)
6155 break;
6156
6157 /* TODO: If we made sure that attributes were stored in the
6158 canonical form without '__...__' (i.e., as in 'text' as opposed
6159 to '__text__') then we could avoid the following case. */
6160 if (p[0] == '_' && p[1] == '_'
6161 && strncmp (attr_name, p + 2, attr_len) == 0)
6162 break;
6163
6164 list = TREE_CHAIN (list);
6165 }
6166
6167 return list;
6168 }
6169
6170
6171 /* A variant of lookup_attribute() that can be used with an identifier
6172 as the first argument, and where the identifier can be either
6173 'text' or '__text__'.
6174
6175 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6176 return a pointer to the attribute's list element if the attribute
6177 is part of the list, or NULL_TREE if not found. If the attribute
6178 appears more than once, this only returns the first occurrence; the
6179 TREE_CHAIN of the return value should be passed back in if further
6180 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6181 can be in the form 'text' or '__text__'. */
6182 static tree
6183 lookup_ident_attribute (tree attr_identifier, tree list)
6184 {
6185 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6186
6187 while (list)
6188 {
6189 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6190 == IDENTIFIER_NODE);
6191
6192 if (cmp_attrib_identifiers (attr_identifier,
6193 get_attribute_name (list)))
6194 /* Found it. */
6195 break;
6196 list = TREE_CHAIN (list);
6197 }
6198
6199 return list;
6200 }
6201
6202 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6203 modified list. */
6204
6205 tree
6206 remove_attribute (const char *attr_name, tree list)
6207 {
6208 tree *p;
6209 size_t attr_len = strlen (attr_name);
6210
6211 gcc_checking_assert (attr_name[0] != '_');
6212
6213 for (p = &list; *p; )
6214 {
6215 tree l = *p;
6216 /* TODO: If we were storing attributes in normalized form, here
6217 we could use a simple strcmp(). */
6218 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6219 *p = TREE_CHAIN (l);
6220 else
6221 p = &TREE_CHAIN (l);
6222 }
6223
6224 return list;
6225 }
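
/* A small illustrative sketch (hypothetical helper name): stripping
   one attribute from a hand-built list.  Both spellings of the name
   are removed; unrelated attributes survive.  */

static tree
example_strip_packed (void)
{
  tree attrs = tree_cons (get_identifier ("__packed__"), NULL_TREE,
			  tree_cons (get_identifier ("aligned"),
				     build_tree_list (NULL_TREE,
						      size_int (8)),
				     NULL_TREE));

  /* The result contains only the "aligned" attribute.  */
  return remove_attribute ("packed", attrs);
}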
6226
6227 /* Return an attribute list that is the union of A1 and A2. */
6228
6229 tree
6230 merge_attributes (tree a1, tree a2)
6231 {
6232 tree attributes;
6233
6234 /* Either one unset? Take the set one. */
6235
6236 if ((attributes = a1) == 0)
6237 attributes = a2;
6238
6239 /* One that completely contains the other? Take it. */
6240
6241 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6242 {
6243 if (attribute_list_contained (a2, a1))
6244 attributes = a2;
6245 else
6246 {
6247 /* Pick the longest list, and hang on the other list. */
6248
6249 if (list_length (a1) < list_length (a2))
6250 attributes = a2, a2 = a1;
6251
6252 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6253 {
6254 tree a;
6255 for (a = lookup_ident_attribute (get_attribute_name (a2),
6256 attributes);
6257 a != NULL_TREE && !attribute_value_equal (a, a2);
6258 a = lookup_ident_attribute (get_attribute_name (a2),
6259 TREE_CHAIN (a)))
6260 ;
6261 if (a == NULL_TREE)
6262 {
6263 a1 = copy_node (a2);
6264 TREE_CHAIN (a1) = attributes;
6265 attributes = a1;
6266 }
6267 }
6268 }
6269 }
6270 return attributes;
6271 }
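
/* A minimal sketch of merging (the helper name is hypothetical): each
   attribute appears once in the result, and a list that already
   contains the other is simply reused.  */

static tree
example_merge (void)
{
  tree a1 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
  tree a2 = tree_cons (get_identifier ("packed"), NULL_TREE,
		       tree_cons (get_identifier ("deprecated"),
				  NULL_TREE, NULL_TREE));

  /* A2 already contains A1, so the merged list is simply A2.  */
  return merge_attributes (a1, a2);
}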
6272
6273 /* Given types T1 and T2, merge their attributes and return
6274 the result. */
6275
6276 tree
6277 merge_type_attributes (tree t1, tree t2)
6278 {
6279 return merge_attributes (TYPE_ATTRIBUTES (t1),
6280 TYPE_ATTRIBUTES (t2));
6281 }
6282
6283 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6284 the result. */
6285
6286 tree
6287 merge_decl_attributes (tree olddecl, tree newdecl)
6288 {
6289 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6290 DECL_ATTRIBUTES (newdecl));
6291 }
6292
6293 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6294
6295 /* Specialization of merge_decl_attributes for various Windows targets.
6296
6297 This handles the following situation:
6298
6299 __declspec (dllimport) int foo;
6300 int foo;
6301
6302 The second instance of `foo' nullifies the dllimport. */
6303
6304 tree
6305 merge_dllimport_decl_attributes (tree old, tree new_tree)
6306 {
6307 tree a;
6308 int delete_dllimport_p = 1;
6309
6310 /* What we need to do here is remove from `old' dllimport if it doesn't
6311 appear in `new'. dllimport behaves like extern: if a declaration is
6312 marked dllimport and a definition appears later, then the object
6313 is not dllimport'd. We also remove a `new' dllimport if the old list
6314 contains dllexport: dllexport always overrides dllimport, regardless
6315 of the order of declaration. */
6316 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6317 delete_dllimport_p = 0;
6318 else if (DECL_DLLIMPORT_P (new_tree)
6319 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6320 {
6321 DECL_DLLIMPORT_P (new_tree) = 0;
6322 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6323 "dllimport ignored", new_tree);
6324 }
6325 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6326 {
6327 /* Warn about overriding a symbol that has already been used, e.g.:
6328 extern int __attribute__ ((dllimport)) foo;
6329 int* bar () {return &foo;}
6330 int foo;
6331 */
6332 if (TREE_USED (old))
6333 {
6334 warning (0, "%q+D redeclared without dllimport attribute "
6335 "after being referenced with dll linkage", new_tree);
6336 /* If we have used a variable's address with dllimport linkage,
6337 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6338 decl may already have had TREE_CONSTANT computed.
6339 We still remove the attribute so that assembler code refers
6340 to '&foo' rather than '_imp__foo'. */
6341 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6342 DECL_DLLIMPORT_P (new_tree) = 1;
6343 }
6344
6345 /* Let an inline definition silently override the external reference,
6346 but otherwise warn about attribute inconsistency. */
6347 else if (TREE_CODE (new_tree) == VAR_DECL
6348 || !DECL_DECLARED_INLINE_P (new_tree))
6349 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6350 "previous dllimport ignored", new_tree);
6351 }
6352 else
6353 delete_dllimport_p = 0;
6354
6355 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6356
6357 if (delete_dllimport_p)
6358 a = remove_attribute ("dllimport", a);
6359
6360 return a;
6361 }
6362
6363 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6364 struct attribute_spec.handler. */
6365
6366 tree
6367 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6368 bool *no_add_attrs)
6369 {
6370 tree node = *pnode;
6371 bool is_dllimport;
6372
6373 /* These attributes may apply to structure and union types being created,
6374 but otherwise should pass to the declaration involved. */
6375 if (!DECL_P (node))
6376 {
6377 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6378 | (int) ATTR_FLAG_ARRAY_NEXT))
6379 {
6380 *no_add_attrs = true;
6381 return tree_cons (name, args, NULL_TREE);
6382 }
6383 if (TREE_CODE (node) == RECORD_TYPE
6384 || TREE_CODE (node) == UNION_TYPE)
6385 {
6386 node = TYPE_NAME (node);
6387 if (!node)
6388 return NULL_TREE;
6389 }
6390 else
6391 {
6392 warning (OPT_Wattributes, "%qE attribute ignored",
6393 name);
6394 *no_add_attrs = true;
6395 return NULL_TREE;
6396 }
6397 }
6398
6399 if (TREE_CODE (node) != FUNCTION_DECL
6400 && TREE_CODE (node) != VAR_DECL
6401 && TREE_CODE (node) != TYPE_DECL)
6402 {
6403 *no_add_attrs = true;
6404 warning (OPT_Wattributes, "%qE attribute ignored",
6405 name);
6406 return NULL_TREE;
6407 }
6408
6409 if (TREE_CODE (node) == TYPE_DECL
6410 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6411 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6412 {
6413 *no_add_attrs = true;
6414 warning (OPT_Wattributes, "%qE attribute ignored",
6415 name);
6416 return NULL_TREE;
6417 }
6418
6419 is_dllimport = is_attribute_p ("dllimport", name);
6420
6421 /* Report error on dllimport ambiguities seen now before they cause
6422 any damage. */
6423 if (is_dllimport)
6424 {
6425 /* Honor any target-specific overrides. */
6426 if (!targetm.valid_dllimport_attribute_p (node))
6427 *no_add_attrs = true;
6428
6429 else if (TREE_CODE (node) == FUNCTION_DECL
6430 && DECL_DECLARED_INLINE_P (node))
6431 {
6432 warning (OPT_Wattributes, "inline function %q+D declared as "
6433 " dllimport: attribute ignored", node);
6434 *no_add_attrs = true;
6435 }
6436 /* Like MS, treat definition of dllimported variables and
6437 non-inlined functions on declaration as syntax errors. */
6438 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6439 {
6440 error ("function %q+D definition is marked dllimport", node);
6441 *no_add_attrs = true;
6442 }
6443
6444 else if (TREE_CODE (node) == VAR_DECL)
6445 {
6446 if (DECL_INITIAL (node))
6447 {
6448 error ("variable %q+D definition is marked dllimport",
6449 node);
6450 *no_add_attrs = true;
6451 }
6452
6453 /* `extern' needn't be specified with dllimport.
6454 Specify `extern' now and hope for the best. Sigh. */
6455 DECL_EXTERNAL (node) = 1;
6456 /* Also, implicitly give global scope to dllimport'd variables
6457 declared within a function, unless declared static. */
6458 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6459 TREE_PUBLIC (node) = 1;
6460 }
6461
6462 if (!*no_add_attrs)
6463 DECL_DLLIMPORT_P (node) = 1;
6464 }
6465 else if (TREE_CODE (node) == FUNCTION_DECL
6466 && DECL_DECLARED_INLINE_P (node)
6467 && flag_keep_inline_dllexport)
6468 /* An exported function, even if inline, must be emitted. */
6469 DECL_EXTERNAL (node) = 0;
6470
6471 /* Report error if symbol is not accessible at global scope. */
6472 if (!TREE_PUBLIC (node)
6473 && (TREE_CODE (node) == VAR_DECL
6474 || TREE_CODE (node) == FUNCTION_DECL))
6475 {
6476 error ("external linkage required for symbol %q+D because of "
6477 "%qE attribute", node, name);
6478 *no_add_attrs = true;
6479 }
6480
6481 /* A dllexport'd entity must have default visibility so that other
6482 program units (shared libraries or the main executable) can see
6483 it. A dllimport'd entity must have default visibility so that
6484 the linker knows that undefined references within this program
6485 unit can be resolved by the dynamic linker. */
6486 if (!*no_add_attrs)
6487 {
6488 if (DECL_VISIBILITY_SPECIFIED (node)
6489 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6490 error ("%qE implies default visibility, but %qD has already "
6491 "been declared with a different visibility",
6492 name, node);
6493 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6494 DECL_VISIBILITY_SPECIFIED (node) = 1;
6495 }
6496
6497 return NULL_TREE;
6498 }
6499
6500 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6501 \f
6502 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6503 of the various TYPE_QUAL values. */
6504
6505 static void
6506 set_type_quals (tree type, int type_quals)
6507 {
6508 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6509 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6510 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6511 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6512 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6513 }
6514
6515 /* Returns true iff unqualified CAND and BASE are equivalent. */
6516
6517 bool
6518 check_base_type (const_tree cand, const_tree base)
6519 {
6520 return (TYPE_NAME (cand) == TYPE_NAME (base)
6521 /* Apparently this is needed for Objective-C. */
6522 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6523 /* Check alignment. */
6524 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6525 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6526 TYPE_ATTRIBUTES (base)));
6527 }
6528
6529 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6530
6531 bool
6532 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6533 {
6534 return (TYPE_QUALS (cand) == type_quals
6535 && check_base_type (cand, base));
6536 }
6537
6538 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6539
6540 static bool
6541 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6542 {
6543 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6544 && TYPE_NAME (cand) == TYPE_NAME (base)
6545 /* Apparently this is needed for Objective-C. */
6546 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6547 /* Check alignment. */
6548 && TYPE_ALIGN (cand) == align
6549 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6550 TYPE_ATTRIBUTES (base)));
6551 }
6552
6553 /* This function checks to see if TYPE matches the size of one of the built-in
6554 atomic types, and returns that core atomic type. */
6555
6556 static tree
6557 find_atomic_core_type (tree type)
6558 {
6559 tree base_atomic_type;
6560
6561 /* Only handle complete types. */
6562 if (TYPE_SIZE (type) == NULL_TREE)
6563 return NULL_TREE;
6564
6565 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6566 switch (type_size)
6567 {
6568 case 8:
6569 base_atomic_type = atomicQI_type_node;
6570 break;
6571
6572 case 16:
6573 base_atomic_type = atomicHI_type_node;
6574 break;
6575
6576 case 32:
6577 base_atomic_type = atomicSI_type_node;
6578 break;
6579
6580 case 64:
6581 base_atomic_type = atomicDI_type_node;
6582 break;
6583
6584 case 128:
6585 base_atomic_type = atomicTI_type_node;
6586 break;
6587
6588 default:
6589 base_atomic_type = NULL_TREE;
6590 }
6591
6592 return base_atomic_type;
6593 }
6594
6595 /* Return a version of TYPE, qualified as indicated by TYPE_QUALS, if
6596 one exists. If no qualified version exists yet, return
6597 NULL_TREE. */
6598
6599 tree
6600 get_qualified_type (tree type, int type_quals)
6601 {
6602 tree t;
6603
6604 if (TYPE_QUALS (type) == type_quals)
6605 return type;
6606
6607 /* Search the chain of variants to see if there is already one there just
6608 like the one we need to have. If so, use that existing one. We must
6609 preserve the TYPE_NAME, since there is code that depends on this. */
6610 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6611 if (check_qualified_type (t, type, type_quals))
6612 return t;
6613
6614 return NULL_TREE;
6615 }
6616
6617 /* Like get_qualified_type, but creates the type if it does not
6618 exist. This function never returns NULL_TREE. */
6619
6620 tree
6621 build_qualified_type (tree type, int type_quals)
6622 {
6623 tree t;
6624
6625 /* See if we already have the appropriate qualified variant. */
6626 t = get_qualified_type (type, type_quals);
6627
6628 /* If not, build it. */
6629 if (!t)
6630 {
6631 t = build_variant_type_copy (type);
6632 set_type_quals (t, type_quals);
6633
6634 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6635 {
6636 /* See if this object can map to a basic atomic type. */
6637 tree atomic_type = find_atomic_core_type (type);
6638 if (atomic_type)
6639 {
6640 /* Ensure the alignment of this type is compatible with
6641 the required alignment of the atomic type. */
6642 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6643 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6644 }
6645 }
6646
6647 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6648 /* Propagate structural equality. */
6649 SET_TYPE_STRUCTURAL_EQUALITY (t);
6650 else if (TYPE_CANONICAL (type) != type)
6651 /* Build the underlying canonical type, since it is different
6652 from TYPE. */
6653 {
6654 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6655 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6656 }
6657 else
6658 /* T is its own canonical type. */
6659 TYPE_CANONICAL (t) = t;
6660
6661 }
6662
6663 return t;
6664 }
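
/* A brief usage sketch (hypothetical helper name): requesting the same
   qualified variant twice yields the same node, because the second
   call finds it on the variant chain via get_qualified_type.  */

static void
example_qualified_int (void)
{
  int quals = TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE;
  tree cv_int = build_qualified_type (integer_type_node, quals);

  gcc_assert (TYPE_READONLY (cv_int) && TYPE_VOLATILE (cv_int));
  gcc_assert (TYPE_MAIN_VARIANT (cv_int) == integer_type_node);
  gcc_assert (build_qualified_type (integer_type_node, quals) == cv_int);
}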
6665
6666 /* Create a variant of TYPE with alignment ALIGN (in bits). */
6667
6668 tree
6669 build_aligned_type (tree type, unsigned int align)
6670 {
6671 tree t;
6672
6673 if (TYPE_PACKED (type)
6674 || TYPE_ALIGN (type) == align)
6675 return type;
6676
6677 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6678 if (check_aligned_type (t, type, align))
6679 return t;
6680
6681 t = build_variant_type_copy (type);
6682 SET_TYPE_ALIGN (t, align);
6683
6684 return t;
6685 }
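
/* An illustrative sketch (hypothetical helper name).  ALIGN is in
   bits, matching TYPE_ALIGN; packed types and requests for the
   alignment a type already has return the original type unchanged.  */

static void
example_overaligned_int (void)
{
  tree t = build_aligned_type (integer_type_node, 128);

  gcc_assert (TYPE_ALIGN (t) == 128);
  /* Asking for the alignment the type already has is a no-op.  */
  gcc_assert (build_aligned_type (t, 128) == t);
}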
6686
6687 /* Create a new distinct copy of TYPE. The new type is made its own
6688 MAIN_VARIANT. If TYPE requires structural equality checks, the
6689 resulting type requires structural equality checks; otherwise, its
6690 TYPE_CANONICAL points to itself. */
6691
6692 tree
6693 build_distinct_type_copy (tree type)
6694 {
6695 tree t = copy_node (type);
6696
6697 TYPE_POINTER_TO (t) = 0;
6698 TYPE_REFERENCE_TO (t) = 0;
6699
6700 /* Set the canonical type either to a new equivalence class, or
6701 propagate the need for structural equality checks. */
6702 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6703 SET_TYPE_STRUCTURAL_EQUALITY (t);
6704 else
6705 TYPE_CANONICAL (t) = t;
6706
6707 /* Make it its own variant. */
6708 TYPE_MAIN_VARIANT (t) = t;
6709 TYPE_NEXT_VARIANT (t) = 0;
6710
6711 /* We do not record methods in type copies or variants, so we do
6712 not need to keep them up to date when a new method is
6713 inserted. */
6714 if (RECORD_OR_UNION_TYPE_P (t))
6715 TYPE_METHODS (t) = NULL_TREE;
6716
6717 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6718 whose TREE_TYPE is not t. This can also happen in the Ada
6719 frontend when using subtypes. */
6720
6721 return t;
6722 }
6723
6724 /* Create a new variant of TYPE, equivalent but distinct. This is so
6725 the caller can modify it. TYPE_CANONICAL for the return type will
6726 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6727 are considered equal by the language itself (or that both types
6728 require structural equality checks). */
6729
6730 tree
6731 build_variant_type_copy (tree type)
6732 {
6733 tree t, m = TYPE_MAIN_VARIANT (type);
6734
6735 t = build_distinct_type_copy (type);
6736
6737 /* Since we're building a variant, assume that it is a non-semantic
6738 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6739 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6740 /* Type variants have no alias set defined. */
6741 TYPE_ALIAS_SET (t) = -1;
6742
6743 /* Add the new type to the chain of variants of TYPE. */
6744 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6745 TYPE_NEXT_VARIANT (m) = t;
6746 TYPE_MAIN_VARIANT (t) = m;
6747
6748 return t;
6749 }
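
/* A sketch contrasting the two kinds of copy (hypothetical helper
   name): a distinct copy starts a new main variant, while a variant
   copy is chained onto the variant list of the original's main
   variant.  */

static void
example_type_copies (tree type)
{
  tree distinct = build_distinct_type_copy (type);
  tree variant = build_variant_type_copy (type);

  gcc_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
  gcc_assert (TYPE_MAIN_VARIANT (variant) == TYPE_MAIN_VARIANT (type));
}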
6750 \f
6751 /* Return true if the from trees in both tree maps are equal. */
6752
6753 int
6754 tree_map_base_eq (const void *va, const void *vb)
6755 {
6756 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6757 *const b = (const struct tree_map_base *) vb;
6758 return (a->from == b->from);
6759 }
6760
6761 /* Hash a from tree in a tree_map_base. */
6762
6763 unsigned int
6764 tree_map_base_hash (const void *item)
6765 {
6766 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6767 }
6768
6769 /* Return true if this tree map structure is marked for garbage collection
6770 purposes. We simply return true if the from tree is marked, so that this
6771 structure goes away when the from tree goes away. */
6772
6773 int
6774 tree_map_base_marked_p (const void *p)
6775 {
6776 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6777 }
6778
6779 /* Hash a from tree in a tree_map. */
6780
6781 unsigned int
6782 tree_map_hash (const void *item)
6783 {
6784 return (((const struct tree_map *) item)->hash);
6785 }
6786
6787 /* Hash a from tree in a tree_decl_map. */
6788
6789 unsigned int
6790 tree_decl_map_hash (const void *item)
6791 {
6792 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6793 }
6794
6795 /* Return the initialization priority for DECL. */
6796
6797 priority_type
6798 decl_init_priority_lookup (tree decl)
6799 {
6800 symtab_node *snode = symtab_node::get (decl);
6801
6802 if (!snode)
6803 return DEFAULT_INIT_PRIORITY;
6804 return snode->get_init_priority ();
6806 }
6807
6808 /* Return the finalization priority for DECL. */
6809
6810 priority_type
6811 decl_fini_priority_lookup (tree decl)
6812 {
6813 cgraph_node *node = cgraph_node::get (decl);
6814
6815 if (!node)
6816 return DEFAULT_INIT_PRIORITY;
6817 return node->get_fini_priority ();
6819 }
6820
6821 /* Set the initialization priority for DECL to PRIORITY. */
6822
6823 void
6824 decl_init_priority_insert (tree decl, priority_type priority)
6825 {
6826 struct symtab_node *snode;
6827
6828 if (priority == DEFAULT_INIT_PRIORITY)
6829 {
6830 snode = symtab_node::get (decl);
6831 if (!snode)
6832 return;
6833 }
6834 else if (TREE_CODE (decl) == VAR_DECL)
6835 snode = varpool_node::get_create (decl);
6836 else
6837 snode = cgraph_node::get_create (decl);
6838 snode->set_init_priority (priority);
6839 }
6840
6841 /* Set the finalization priority for DECL to PRIORITY. */
6842
6843 void
6844 decl_fini_priority_insert (tree decl, priority_type priority)
6845 {
6846 struct cgraph_node *node;
6847
6848 if (priority == DEFAULT_INIT_PRIORITY)
6849 {
6850 node = cgraph_node::get (decl);
6851 if (!node)
6852 return;
6853 }
6854 else
6855 node = cgraph_node::get_create (decl);
6856 node->set_fini_priority (priority);
6857 }
6858
6859 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6860
6861 static void
6862 print_debug_expr_statistics (void)
6863 {
6864 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6865 (long) debug_expr_for_decl->size (),
6866 (long) debug_expr_for_decl->elements (),
6867 debug_expr_for_decl->collisions ());
6868 }
6869
6870 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6871
6872 static void
6873 print_value_expr_statistics (void)
6874 {
6875 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6876 (long) value_expr_for_decl->size (),
6877 (long) value_expr_for_decl->elements (),
6878 value_expr_for_decl->collisions ());
6879 }
6880
6881 /* Lookup a debug expression for FROM, and return it if we find one. */
6882
6883 tree
6884 decl_debug_expr_lookup (tree from)
6885 {
6886 struct tree_decl_map *h, in;
6887 in.base.from = from;
6888
6889 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6890 if (h)
6891 return h->to;
6892 return NULL_TREE;
6893 }
6894
6895 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6896
6897 void
6898 decl_debug_expr_insert (tree from, tree to)
6899 {
6900 struct tree_decl_map *h;
6901
6902 h = ggc_alloc<tree_decl_map> ();
6903 h->base.from = from;
6904 h->to = to;
6905 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6906 }
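
/* A minimal round-trip sketch (hypothetical helper name), assuming the
   debug-expression table has been initialized.  Callers normally also
   mark VAR as having a debug expression; only the hash-table side is
   shown here.  */

static tree
example_debug_expr_roundtrip (tree var, tree expr)
{
  decl_debug_expr_insert (var, expr);
  return decl_debug_expr_lookup (var);	/* Yields EXPR again.  */
}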
6907
6908 /* Lookup a value expression for FROM, and return it if we find one. */
6909
6910 tree
6911 decl_value_expr_lookup (tree from)
6912 {
6913 struct tree_decl_map *h, in;
6914 in.base.from = from;
6915
6916 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6917 if (h)
6918 return h->to;
6919 return NULL_TREE;
6920 }
6921
6922 /* Insert a mapping FROM->TO in the value expression hashtable. */
6923
6924 void
6925 decl_value_expr_insert (tree from, tree to)
6926 {
6927 struct tree_decl_map *h;
6928
6929 h = ggc_alloc<tree_decl_map> ();
6930 h->base.from = from;
6931 h->to = to;
6932 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6933 }
6934
6935 /* Lookup a vector of debug arguments for FROM, and return it if we
6936 find one. */
6937
6938 vec<tree, va_gc> **
6939 decl_debug_args_lookup (tree from)
6940 {
6941 struct tree_vec_map *h, in;
6942
6943 if (!DECL_HAS_DEBUG_ARGS_P (from))
6944 return NULL;
6945 gcc_checking_assert (debug_args_for_decl != NULL);
6946 in.base.from = from;
6947 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6948 if (h)
6949 return &h->to;
6950 return NULL;
6951 }
6952
6953 /* Insert a mapping FROM->empty vector of debug arguments in the
6954 debug arguments hashtable. */
6955
6956 vec<tree, va_gc> **
6957 decl_debug_args_insert (tree from)
6958 {
6959 struct tree_vec_map *h;
6960 tree_vec_map **loc;
6961
6962 if (DECL_HAS_DEBUG_ARGS_P (from))
6963 return decl_debug_args_lookup (from);
6964 if (debug_args_for_decl == NULL)
6965 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6966 h = ggc_alloc<tree_vec_map> ();
6967 h->base.from = from;
6968 h->to = NULL;
6969 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6970 *loc = h;
6971 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6972 return &h->to;
6973 }
6974
6975 /* Hashing of types so that we don't make duplicates.
6976 The entry point is `type_hash_canon'. */
6977
6978 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6979 with types in the TREE_VALUE slots), by adding the hash codes
6980 of the individual types. */
6981
6982 static void
6983 type_hash_list (const_tree list, inchash::hash &hstate)
6984 {
6985 const_tree tail;
6986
6987 for (tail = list; tail; tail = TREE_CHAIN (tail))
6988 if (TREE_VALUE (tail) != error_mark_node)
6989 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6990 }
6991
6992 /* These are the Hashtable callback functions. */
6993
6994 /* Returns true iff the types are equivalent. */
6995
6996 bool
6997 type_cache_hasher::equal (type_hash *a, type_hash *b)
6998 {
6999 /* First test the things that are the same for all types. */
7000 if (a->hash != b->hash
7001 || TREE_CODE (a->type) != TREE_CODE (b->type)
7002 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7003 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7004 TYPE_ATTRIBUTES (b->type))
7005 || (TREE_CODE (a->type) != COMPLEX_TYPE
7006 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7007 return 0;
7008
7009 /* Be careful about comparing arrays before and after the element type
7010 has been completed; don't compare TYPE_ALIGN unless both types are
7011 complete. */
7012 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7013 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7014 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7015 return 0;
7016
7017 switch (TREE_CODE (a->type))
7018 {
7019 case VOID_TYPE:
7020 case COMPLEX_TYPE:
7021 case POINTER_TYPE:
7022 case REFERENCE_TYPE:
7023 case NULLPTR_TYPE:
7024 return 1;
7025
7026 case VECTOR_TYPE:
7027 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7028
7029 case ENUMERAL_TYPE:
7030 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7031 && !(TYPE_VALUES (a->type)
7032 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7033 && TYPE_VALUES (b->type)
7034 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7035 && type_list_equal (TYPE_VALUES (a->type),
7036 TYPE_VALUES (b->type))))
7037 return 0;
7038
7039 /* ... fall through ... */
7040
7041 case INTEGER_TYPE:
7042 case REAL_TYPE:
7043 case BOOLEAN_TYPE:
7044 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7045 return false;
7046 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7047 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7048 TYPE_MAX_VALUE (b->type)))
7049 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7050 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7051 TYPE_MIN_VALUE (b->type))));
7052
7053 case FIXED_POINT_TYPE:
7054 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7055
7056 case OFFSET_TYPE:
7057 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7058
7059 case METHOD_TYPE:
7060 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7061 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7062 || (TYPE_ARG_TYPES (a->type)
7063 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7064 && TYPE_ARG_TYPES (b->type)
7065 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7066 && type_list_equal (TYPE_ARG_TYPES (a->type),
7067 TYPE_ARG_TYPES (b->type)))))
7068 break;
7069 return 0;
7070 case ARRAY_TYPE:
7071 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7072
7073 case RECORD_TYPE:
7074 case UNION_TYPE:
7075 case QUAL_UNION_TYPE:
7076 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7077 || (TYPE_FIELDS (a->type)
7078 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7079 && TYPE_FIELDS (b->type)
7080 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7081 && type_list_equal (TYPE_FIELDS (a->type),
7082 TYPE_FIELDS (b->type))));
7083
7084 case FUNCTION_TYPE:
7085 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7086 || (TYPE_ARG_TYPES (a->type)
7087 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7088 && TYPE_ARG_TYPES (b->type)
7089 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7090 && type_list_equal (TYPE_ARG_TYPES (a->type),
7091 TYPE_ARG_TYPES (b->type))))
7092 break;
7093 return 0;
7094
7095 default:
7096 return 0;
7097 }
7098
7099 if (lang_hooks.types.type_hash_eq != NULL)
7100 return lang_hooks.types.type_hash_eq (a->type, b->type);
7101
7102 return 1;
7103 }
7104
7105 /* Given TYPE, and HASHCODE its hash code, return the canonical
7106 object for an identical type if one already exists.
7107 Otherwise, return TYPE, and record it as the canonical object.
7108
7109 To use this function, first create a type of the sort you want.
7110 Then compute its hash code from the fields of the type that
7111 make it different from other similar types.
7112 Then call this function and use the value. */
7113
7114 tree
7115 type_hash_canon (unsigned int hashcode, tree type)
7116 {
7117 type_hash in;
7118 type_hash **loc;
7119
7120 /* The hash table only contains main variants, so ensure that's what we're
7121 being passed. */
7122 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7123
7124 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7125 must call that routine before comparing TYPE_ALIGNs. */
7126 layout_type (type);
7127
7128 in.hash = hashcode;
7129 in.type = type;
7130
7131 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7132 if (*loc)
7133 {
7134 tree t1 = ((type_hash *) *loc)->type;
7135 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7136 free_node (type);
7137 return t1;
7138 }
7139 else
7140 {
7141 struct type_hash *h;
7142
7143 h = ggc_alloc<type_hash> ();
7144 h->hash = hashcode;
7145 h->type = type;
7146 *loc = h;
7147
7148 return type;
7149 }
7150 }
7151
7152 static void
7153 print_type_hash_statistics (void)
7154 {
7155 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7156 (long) type_hash_table->size (),
7157 (long) type_hash_table->elements (),
7158 type_hash_table->collisions ());
7159 }
7160
7161 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7162 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7163 by adding the hash codes of the individual attributes. */
7164
7165 static void
7166 attribute_hash_list (const_tree list, inchash::hash &hstate)
7167 {
7168 const_tree tail;
7169
7170 for (tail = list; tail; tail = TREE_CHAIN (tail))
7171 /* ??? Do we want to add in TREE_VALUE too? */
7172 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7173 }
7174
7175 /* Given two lists of attributes, return true if list L2 is
7176 equivalent to L1. */
7177
7178 int
7179 attribute_list_equal (const_tree l1, const_tree l2)
7180 {
7181 if (l1 == l2)
7182 return 1;
7183
7184 return attribute_list_contained (l1, l2)
7185 && attribute_list_contained (l2, l1);
7186 }
7187
7188 /* Given two lists of attributes, return true if list L2 is
7189 completely contained within L1. */
7190 /* ??? This would be faster if attribute names were stored in a canonicalized
7191 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7192 must be used to show these elements are equivalent (which they are). */
7193 /* ??? It's not clear that attributes with arguments will always be handled
7194 correctly. */
7195
7196 int
7197 attribute_list_contained (const_tree l1, const_tree l2)
7198 {
7199 const_tree t1, t2;
7200
7201 /* First check the obvious, maybe the lists are identical. */
7202 if (l1 == l2)
7203 return 1;
7204
7205 /* Maybe the lists are similar. */
7206 for (t1 = l1, t2 = l2;
7207 t1 != 0 && t2 != 0
7208 && get_attribute_name (t1) == get_attribute_name (t2)
7209 && TREE_VALUE (t1) == TREE_VALUE (t2);
7210 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7211 ;
7212
7213 /* Maybe the lists are equal. */
7214 if (t1 == 0 && t2 == 0)
7215 return 1;
7216
7217 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7218 {
7219 const_tree attr;
7220 /* This CONST_CAST is okay because lookup_ident_attribute does not
7221 modify its argument and the return value is assigned to a
7222 const_tree. */
7223 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7224 CONST_CAST_TREE (l1));
7225 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7226 attr = lookup_ident_attribute (get_attribute_name (t2),
7227 TREE_CHAIN (attr)))
7228 ;
7229
7230 if (attr == NULL_TREE)
7231 return 0;
7232 }
7233
7234 return 1;
7235 }
7236
7237 /* Given two lists of types
7238 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7239 return 1 if the lists contain the same types in the same order.
7240 Also, the TREE_PURPOSEs must match. */
7241
7242 int
7243 type_list_equal (const_tree l1, const_tree l2)
7244 {
7245 const_tree t1, t2;
7246
7247 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7248 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7249 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7250 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7251 && (TREE_TYPE (TREE_PURPOSE (t1))
7252 == TREE_TYPE (TREE_PURPOSE (t2))))))
7253 return 0;
7254
7255 return t1 == t2;
7256 }
7257
7258 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7259 given by TYPE. If the argument list accepts variable arguments,
7260 then this function counts only the ordinary arguments. */
7261
7262 int
7263 type_num_arguments (const_tree type)
7264 {
7265 int i = 0;
7266 tree t;
7267
7268 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7269 /* If the function does not take a variable number of arguments,
7270 the last element in the list will have type `void'. */
7271 if (VOID_TYPE_P (TREE_VALUE (t)))
7272 break;
7273 else
7274 ++i;
7275
7276 return i;
7277 }
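
/* An illustrative sketch (hypothetical helper name): the trailing
   void_type_node appended by build_function_type_list marks the end
   of a non-varargs argument list and is not counted.  */

static void
example_count_args (void)
{
  tree fntype = build_function_type_list (integer_type_node,
					  ptr_type_node,
					  integer_type_node,
					  NULL_TREE);

  gcc_assert (type_num_arguments (fntype) == 2);
}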
7278
7279 /* Nonzero if integer constants T1 and T2
7280 represent the same constant value. */
7281
7282 int
7283 tree_int_cst_equal (const_tree t1, const_tree t2)
7284 {
7285 if (t1 == t2)
7286 return 1;
7287
7288 if (t1 == 0 || t2 == 0)
7289 return 0;
7290
7291 if (TREE_CODE (t1) == INTEGER_CST
7292 && TREE_CODE (t2) == INTEGER_CST
7293 && wi::to_widest (t1) == wi::to_widest (t2))
7294 return 1;
7295
7296 return 0;
7297 }
7298
7299 /* Return true if T is an INTEGER_CST whose numerical value (extended
7300 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7301
7302 bool
7303 tree_fits_shwi_p (const_tree t)
7304 {
7305 return (t != NULL_TREE
7306 && TREE_CODE (t) == INTEGER_CST
7307 && wi::fits_shwi_p (wi::to_widest (t)));
7308 }
7309
7310 /* Return true if T is an INTEGER_CST whose numerical value (extended
7311 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7312
7313 bool
7314 tree_fits_uhwi_p (const_tree t)
7315 {
7316 return (t != NULL_TREE
7317 && TREE_CODE (t) == INTEGER_CST
7318 && wi::fits_uhwi_p (wi::to_widest (t)));
7319 }
7320
7321 /* T is an INTEGER_CST whose numerical value (extended according to
7322 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7323 HOST_WIDE_INT. */
7324
7325 HOST_WIDE_INT
7326 tree_to_shwi (const_tree t)
7327 {
7328 gcc_assert (tree_fits_shwi_p (t));
7329 return TREE_INT_CST_LOW (t);
7330 }
7331
7332 /* T is an INTEGER_CST whose numerical value (extended according to
7333 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7334 HOST_WIDE_INT. */
7335
7336 unsigned HOST_WIDE_INT
7337 tree_to_uhwi (const_tree t)
7338 {
7339 gcc_assert (tree_fits_uhwi_p (t));
7340 return TREE_INT_CST_LOW (t);
7341 }
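
/* A short sketch (hypothetical helper name) of the fits/to pairing:
   always test with tree_fits_*hwi_p before extracting, since the
   extraction routines assert the predicate.  */

static void
example_extract_int_cst (void)
{
  tree forty_two = build_int_cst (integer_type_node, 42);

  if (tree_fits_shwi_p (forty_two))
    gcc_assert (tree_to_shwi (forty_two) == 42);
  if (tree_fits_uhwi_p (forty_two))
    gcc_assert (tree_to_uhwi (forty_two) == 42);
}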
7342
7343 /* Return the most significant (sign) bit of T. */
7344
7345 int
7346 tree_int_cst_sign_bit (const_tree t)
7347 {
7348 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7349
7350 return wi::extract_uhwi (t, bitno, 1);
7351 }
7352
7353 /* Return an indication of the sign of the integer constant T.
7354 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7355 Note that -1 will never be returned if T's type is unsigned. */
7356
7357 int
7358 tree_int_cst_sgn (const_tree t)
7359 {
7360 if (wi::eq_p (t, 0))
7361 return 0;
7362 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7363 return 1;
7364 else if (wi::neg_p (t))
7365 return -1;
7366 else
7367 return 1;
7368 }
7369
7370 /* Return the minimum number of bits needed to represent VALUE in a
7371 signed or unsigned type; SGN says which. */
7372
7373 unsigned int
7374 tree_int_cst_min_precision (tree value, signop sgn)
7375 {
7376 /* If the value is negative, compute its negative minus 1. The latter
7377 adjustment is because the absolute value of the largest negative value
7378 is one larger than the largest positive value. This is equivalent to
7379 a bit-wise negation, so use that operation instead. */
7380
7381 if (tree_int_cst_sgn (value) < 0)
7382 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7383
7384 /* Return the number of bits needed, taking into account the fact
7385 that we need one more bit for a signed than unsigned type.
7386 If VALUE is 0 or -1, the minimum precision is 1 no matter
7387 whether SGN is SIGNED or UNSIGNED. */
7388
7389 if (integer_zerop (value))
7390 return 1;
7391 else
7392 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7393 }
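
/* A worked sketch (hypothetical helper name): 255 needs 8 bits as an
   unsigned quantity and 9 bits once a sign bit is required; 0 always
   needs a single bit.  */

static void
example_min_precision (void)
{
  tree v = build_int_cst (integer_type_node, 255);

  gcc_assert (tree_int_cst_min_precision (v, UNSIGNED) == 8);
  gcc_assert (tree_int_cst_min_precision (v, SIGNED) == 9);
  gcc_assert (tree_int_cst_min_precision (integer_zero_node, SIGNED) == 1);
}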
7394
7395 /* Return truthvalue of whether T1 is the same tree structure as T2.
7396 Return 1 if they are the same.
7397 Return 0 if they are understandably different.
7398 Return -1 if either contains tree structure not understood by
7399 this function. */
7400
7401 int
7402 simple_cst_equal (const_tree t1, const_tree t2)
7403 {
7404 enum tree_code code1, code2;
7405 int cmp;
7406 int i;
7407
7408 if (t1 == t2)
7409 return 1;
7410 if (t1 == 0 || t2 == 0)
7411 return 0;
7412
7413 code1 = TREE_CODE (t1);
7414 code2 = TREE_CODE (t2);
7415
7416 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7417 {
7418 if (CONVERT_EXPR_CODE_P (code2)
7419 || code2 == NON_LVALUE_EXPR)
7420 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7421 else
7422 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7423 }
7424
7425 else if (CONVERT_EXPR_CODE_P (code2)
7426 || code2 == NON_LVALUE_EXPR)
7427 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7428
7429 if (code1 != code2)
7430 return 0;
7431
7432 switch (code1)
7433 {
7434 case INTEGER_CST:
7435 return wi::to_widest (t1) == wi::to_widest (t2);
7436
7437 case REAL_CST:
7438 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7439
7440 case FIXED_CST:
7441 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7442
7443 case STRING_CST:
7444 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7445 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7446 TREE_STRING_LENGTH (t1)));
7447
7448 case CONSTRUCTOR:
7449 {
7450 unsigned HOST_WIDE_INT idx;
7451 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7452 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7453
7454 if (vec_safe_length (v1) != vec_safe_length (v2))
7455 return false;
7456
7457 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7458 /* ??? Should we also handle fields here? */
7459 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7460 return false;
7461 return true;
7462 }
7463
7464 case SAVE_EXPR:
7465 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7466
7467 case CALL_EXPR:
7468 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7469 if (cmp <= 0)
7470 return cmp;
7471 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7472 return 0;
7473 {
7474 const_tree arg1, arg2;
7475 const_call_expr_arg_iterator iter1, iter2;
7476 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7477 arg2 = first_const_call_expr_arg (t2, &iter2);
7478 arg1 && arg2;
7479 arg1 = next_const_call_expr_arg (&iter1),
7480 arg2 = next_const_call_expr_arg (&iter2))
7481 {
7482 cmp = simple_cst_equal (arg1, arg2);
7483 if (cmp <= 0)
7484 return cmp;
7485 }
7486 return arg1 == arg2;
7487 }
7488
7489 case TARGET_EXPR:
7490 /* Special case: if either target is an unallocated VAR_DECL,
7491 it means that it's going to be unified with whatever the
7492 TARGET_EXPR is really supposed to initialize, so treat it
7493 as being equivalent to anything. */
7494 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7495 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7496 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7497 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7498 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7499 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7500 cmp = 1;
7501 else
7502 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7503
7504 if (cmp <= 0)
7505 return cmp;
7506
7507 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7508
7509 case WITH_CLEANUP_EXPR:
7510 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7511 if (cmp <= 0)
7512 return cmp;
7513
7514 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7515
7516 case COMPONENT_REF:
7517 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7518 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7519
7520 return 0;
7521
7522 case VAR_DECL:
7523 case PARM_DECL:
7524 case CONST_DECL:
7525 case FUNCTION_DECL:
7526 return 0;
7527
7528 default:
7529 break;
7530 }
7531
7532 /* This general rule works for most tree codes. All exceptions should be
7533 handled above. If this is a language-specific tree code, we can't
7534 trust what might be in the operand, so say we don't know
7535 the situation. */
7536 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7537 return -1;
7538
7539 switch (TREE_CODE_CLASS (code1))
7540 {
7541 case tcc_unary:
7542 case tcc_binary:
7543 case tcc_comparison:
7544 case tcc_expression:
7545 case tcc_reference:
7546 case tcc_statement:
7547 cmp = 1;
7548 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7549 {
7550 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7551 if (cmp <= 0)
7552 return cmp;
7553 }
7554
7555 return cmp;
7556
7557 default:
7558 return -1;
7559 }
7560 }
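
/* A small sketch (hypothetical helper name): INTEGER_CSTs compare by
   value regardless of type, and NON_LVALUE_EXPR / conversion wrappers
   are looked through before comparing.  */

static void
example_simple_cst_equal (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (long_integer_type_node, 7);
  tree wrapped = build1 (NOP_EXPR, long_integer_type_node, a);

  gcc_assert (simple_cst_equal (a, b) == 1);
  gcc_assert (simple_cst_equal (wrapped, b) == 1);
}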
7561
7562 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7563 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7564 than U, respectively. */
7565
7566 int
7567 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7568 {
7569 if (tree_int_cst_sgn (t) < 0)
7570 return -1;
7571 else if (!tree_fits_uhwi_p (t))
7572 return 1;
7573 else if (TREE_INT_CST_LOW (t) == u)
7574 return 0;
7575 else if (TREE_INT_CST_LOW (t) < u)
7576 return -1;
7577 else
7578 return 1;
7579 }
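
/* An illustrative sketch (hypothetical helper name), useful when
   comparing a tree size or bound against a plain host integer.  */

static void
example_compare_tree_int (void)
{
  tree ten = size_int (10);

  gcc_assert (compare_tree_int (ten, 20) < 0);
  gcc_assert (compare_tree_int (ten, 10) == 0);
  gcc_assert (compare_tree_int (ten, 3) > 0);
}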
7580
7581 /* Return true if SIZE represents a constant size that is in bounds of
7582 what the middle-end and the backend accept (covering not more than
7583 half of the address-space). */
7584
7585 bool
7586 valid_constant_size_p (const_tree size)
7587 {
7588 if (! tree_fits_uhwi_p (size)
7589 || TREE_OVERFLOW (size)
7590 || tree_int_cst_sign_bit (size) != 0)
7591 return false;
7592 return true;
7593 }
7594
7595 /* Return the precision of the type, or for a complex or vector type the
7596 precision of the type of its elements. */
7597
7598 unsigned int
7599 element_precision (const_tree type)
7600 {
7601 if (!TYPE_P (type))
7602 type = TREE_TYPE (type);
7603 enum tree_code code = TREE_CODE (type);
7604 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7605 type = TREE_TYPE (type);
7606
7607 return TYPE_PRECISION (type);
7608 }
7609
7610 /* Return true if CODE represents an associative tree code. Otherwise
7611 return false. */
7612 bool
7613 associative_tree_code (enum tree_code code)
7614 {
7615 switch (code)
7616 {
7617 case BIT_IOR_EXPR:
7618 case BIT_AND_EXPR:
7619 case BIT_XOR_EXPR:
7620 case PLUS_EXPR:
7621 case MULT_EXPR:
7622 case MIN_EXPR:
7623 case MAX_EXPR:
7624 return true;
7625
7626 default:
7627 break;
7628 }
7629 return false;
7630 }
7631
7632 /* Return true if CODE represents a commutative tree code. Otherwise
7633 return false. */
7634 bool
7635 commutative_tree_code (enum tree_code code)
7636 {
7637 switch (code)
7638 {
7639 case PLUS_EXPR:
7640 case MULT_EXPR:
7641 case MULT_HIGHPART_EXPR:
7642 case MIN_EXPR:
7643 case MAX_EXPR:
7644 case BIT_IOR_EXPR:
7645 case BIT_XOR_EXPR:
7646 case BIT_AND_EXPR:
7647 case NE_EXPR:
7648 case EQ_EXPR:
7649 case UNORDERED_EXPR:
7650 case ORDERED_EXPR:
7651 case UNEQ_EXPR:
7652 case LTGT_EXPR:
7653 case TRUTH_AND_EXPR:
7654 case TRUTH_XOR_EXPR:
7655 case TRUTH_OR_EXPR:
7656 case WIDEN_MULT_EXPR:
7657 case VEC_WIDEN_MULT_HI_EXPR:
7658 case VEC_WIDEN_MULT_LO_EXPR:
7659 case VEC_WIDEN_MULT_EVEN_EXPR:
7660 case VEC_WIDEN_MULT_ODD_EXPR:
7661 return true;
7662
7663 default:
7664 break;
7665 }
7666 return false;
7667 }
7668
7669 /* Return true if CODE represents a ternary tree code for which the
7670 first two operands are commutative. Otherwise return false. */
7671 bool
7672 commutative_ternary_tree_code (enum tree_code code)
7673 {
7674 switch (code)
7675 {
7676 case WIDEN_MULT_PLUS_EXPR:
7677 case WIDEN_MULT_MINUS_EXPR:
7678 case DOT_PROD_EXPR:
7679 case FMA_EXPR:
7680 return true;
7681
7682 default:
7683 break;
7684 }
7685 return false;
7686 }
7687
7688 /* Returns true if CODE can overflow. */
7689
7690 bool
7691 operation_can_overflow (enum tree_code code)
7692 {
7693 switch (code)
7694 {
7695 case PLUS_EXPR:
7696 case MINUS_EXPR:
7697 case MULT_EXPR:
7698 case LSHIFT_EXPR:
7699 /* Can overflow in various ways. */
7700 return true;
7701 case TRUNC_DIV_EXPR:
7702 case EXACT_DIV_EXPR:
7703 case FLOOR_DIV_EXPR:
7704 case CEIL_DIV_EXPR:
7705 /* For INT_MIN / -1. */
7706 return true;
7707 case NEGATE_EXPR:
7708 case ABS_EXPR:
7709 /* For -INT_MIN. */
7710 return true;
7711 default:
7712 /* These operators cannot overflow. */
7713 return false;
7714 }
7715 }
7716
7717 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7718 -ftrapv doesn't generate trapping insns for CODE. */
7719
7720 bool
7721 operation_no_trapping_overflow (tree type, enum tree_code code)
7722 {
7723 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7724
7725 /* We don't generate instructions that trap on overflow for complex or vector
7726 types. */
7727 if (!INTEGRAL_TYPE_P (type))
7728 return true;
7729
7730 if (!TYPE_OVERFLOW_TRAPS (type))
7731 return true;
7732
7733 switch (code)
7734 {
7735 case PLUS_EXPR:
7736 case MINUS_EXPR:
7737 case MULT_EXPR:
7738 case NEGATE_EXPR:
7739 case ABS_EXPR:
7740 /* These operators can overflow, and -ftrapv generates trapping code for
7741 these. */
7742 return false;
7743 case TRUNC_DIV_EXPR:
7744 case EXACT_DIV_EXPR:
7745 case FLOOR_DIV_EXPR:
7746 case CEIL_DIV_EXPR:
7747 case LSHIFT_EXPR:
7748 /* These operators can overflow, but -ftrapv does not generate trapping
7749 code for these. */
7750 return true;
7751 default:
7752 /* These operators cannot overflow. */
7753 return true;
7754 }
7755 }
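
/* A brief sketch (hypothetical helper name; SIGNED_TYPE is assumed to
   be a signed integral type): signed addition can both overflow and
   trap under -ftrapv, whereas signed division can overflow
   (INT_MIN / -1) but never generates trapping code.  */

static void
example_overflow_queries (tree signed_type)
{
  gcc_assert (operation_can_overflow (PLUS_EXPR));
  gcc_assert (operation_can_overflow (TRUNC_DIV_EXPR));

  if (TYPE_OVERFLOW_TRAPS (signed_type))
    {
      gcc_assert (!operation_no_trapping_overflow (signed_type, PLUS_EXPR));
      gcc_assert (operation_no_trapping_overflow (signed_type,
						  TRUNC_DIV_EXPR));
    }
}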
7756
7757 namespace inchash
7758 {
7759
7760 /* Generate a hash value for an expression. This can be used iteratively
7761 by passing a previous result as the HSTATE argument.
7762
7763 This function is intended to produce the same hash for expressions which
7764 would compare equal using operand_equal_p. */
7765 void
7766 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7767 {
7768 int i;
7769 enum tree_code code;
7770 enum tree_code_class tclass;
7771
7772 if (t == NULL_TREE)
7773 {
7774 hstate.merge_hash (0);
7775 return;
7776 }
7777
7778 if (!(flags & OEP_ADDRESS_OF))
7779 STRIP_NOPS (t);
7780
7781 code = TREE_CODE (t);
7782
7783 switch (code)
7784 {
7785 /* Alas, constants aren't shared, so we can't rely on pointer
7786 identity. */
7787 case VOID_CST:
7788 hstate.merge_hash (0);
7789 return;
7790 case INTEGER_CST:
7791 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7792 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7793 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7794 return;
7795 case REAL_CST:
7796 {
7797 unsigned int val2;
7798 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7799 val2 = rvc_zero;
7800 else
7801 val2 = real_hash (TREE_REAL_CST_PTR (t));
7802 hstate.merge_hash (val2);
7803 return;
7804 }
7805 case FIXED_CST:
7806 {
7807 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7808 hstate.merge_hash (val2);
7809 return;
7810 }
7811 case STRING_CST:
7812 hstate.add ((const void *) TREE_STRING_POINTER (t),
7813 TREE_STRING_LENGTH (t));
7814 return;
7815 case COMPLEX_CST:
7816 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7817 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7818 return;
7819 case VECTOR_CST:
7820 {
7821 unsigned i;
7822 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7823 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
7824 return;
7825 }
7826 case SSA_NAME:
7827 /* SSA names are unique, so hashing the version number is sufficient. */
7828 hstate.add_wide_int (SSA_NAME_VERSION (t));
7829 return;
7830 case PLACEHOLDER_EXPR:
7831 /* The node itself doesn't matter. */
7832 return;
7833 case BLOCK:
7834 case OMP_CLAUSE:
7835 /* Ignore. */
7836 return;
7837 case TREE_LIST:
7838 /* A list of expressions, for a CALL_EXPR or as the elements of a
7839 VECTOR_CST. */
7840 for (; t; t = TREE_CHAIN (t))
7841 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7842 return;
7843 case CONSTRUCTOR:
7844 {
7845 unsigned HOST_WIDE_INT idx;
7846 tree field, value;
7847 flags &= ~OEP_ADDRESS_OF;
7848 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7849 {
7850 inchash::add_expr (field, hstate, flags);
7851 inchash::add_expr (value, hstate, flags);
7852 }
7853 return;
7854 }
7855 case STATEMENT_LIST:
7856 {
7857 tree_stmt_iterator i;
7858 for (i = tsi_start (CONST_CAST_TREE (t));
7859 !tsi_end_p (i); tsi_next (&i))
7860 inchash::add_expr (tsi_stmt (i), hstate, flags);
7861 return;
7862 }
7863 case FUNCTION_DECL:
7864 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7865 Otherwise nodes that compare equal according to operand_equal_p might
7866 get different hash codes. However, don't do this for machine specific
7867 or front end builtins, since the function code is overloaded in those
7868 cases. */
7869 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7870 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7871 {
7872 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7873 code = TREE_CODE (t);
7874 }
7875 /* FALL THROUGH */
7876 default:
7877 tclass = TREE_CODE_CLASS (code);
7878
7879 if (tclass == tcc_declaration)
7880 {
7881 /* DECLs have a unique ID. */
7882 hstate.add_wide_int (DECL_UID (t));
7883 }
7884 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7885 {
7886 /* For comparisons that can be swapped, use the lower
7887 tree code. */
7888 enum tree_code ccode = swap_tree_comparison (code);
7889 if (code < ccode)
7890 ccode = code;
7891 hstate.add_object (ccode);
7892 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7893 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7894 }
7895 else if (CONVERT_EXPR_CODE_P (code))
7896 {
7897 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7898 operand_equal_p. */
7899 enum tree_code ccode = NOP_EXPR;
7900 hstate.add_object (ccode);
7901
7902 /* Don't hash the type, that can lead to having nodes which
7903 compare equal according to operand_equal_p, but which
7904 have different hash codes. Make sure to include signedness
7905 in the hash computation. */
7906 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7907 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7908 }
7909 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
7910 else if (code == MEM_REF
7911 && (flags & OEP_ADDRESS_OF) != 0
7912 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7913 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7914 && integer_zerop (TREE_OPERAND (t, 1)))
7915 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7916 hstate, flags);
7917 /* Don't ICE on FE specific trees, or their arguments etc.
7918 during operand_equal_p hash verification. */
7919 else if (!IS_EXPR_CODE_CLASS (tclass))
7920 gcc_assert (flags & OEP_HASH_CHECK);
7921 else
7922 {
7923 unsigned int sflags = flags;
7924
7925 hstate.add_object (code);
7926
7927 switch (code)
7928 {
7929 case ADDR_EXPR:
7930 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7931 flags |= OEP_ADDRESS_OF;
7932 sflags = flags;
7933 break;
7934
7935 case INDIRECT_REF:
7936 case MEM_REF:
7937 case TARGET_MEM_REF:
7938 flags &= ~OEP_ADDRESS_OF;
7939 sflags = flags;
7940 break;
7941
7942 case ARRAY_REF:
7943 case ARRAY_RANGE_REF:
7944 case COMPONENT_REF:
7945 case BIT_FIELD_REF:
7946 sflags &= ~OEP_ADDRESS_OF;
7947 break;
7948
7949 case COND_EXPR:
7950 flags &= ~OEP_ADDRESS_OF;
7951 break;
7952
7953 case FMA_EXPR:
7954 case WIDEN_MULT_PLUS_EXPR:
7955 case WIDEN_MULT_MINUS_EXPR:
7956 {
7957 /* The multiplication operands are commutative. */
7958 inchash::hash one, two;
7959 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7960 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7961 hstate.add_commutative (one, two);
7962 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7963 return;
7964 }
7965
7966 case CALL_EXPR:
7967 if (CALL_EXPR_FN (t) == NULL_TREE)
7968 hstate.add_int (CALL_EXPR_IFN (t));
7969 break;
7970
7971 case TARGET_EXPR:
7972 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7973 Usually different TARGET_EXPRs should just use
7974 different temporaries in their slots. */
7975 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7976 return;
7977
7978 default:
7979 break;
7980 }
7981
7982 /* Don't hash the type, that can lead to having nodes which
7983 compare equal according to operand_equal_p, but which
7984 have different hash codes. */
7985 if (code == NON_LVALUE_EXPR)
7986 {
7987 /* Make sure to include signedness in the hash computation. */
7988 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7989 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7990 }
7991
7992 else if (commutative_tree_code (code))
7993 {
7994 /* It's a commutative expression. We want to hash it the same
7995 however it appears. We do this by first hashing both operands
7996 and then rehashing based on the order of their independent
7997 hashes. */
7998 inchash::hash one, two;
7999 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8000 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8001 hstate.add_commutative (one, two);
8002 }
8003 else
8004 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
8005 inchash::add_expr (TREE_OPERAND (t, i), hstate,
8006 i == 0 ? flags : sflags);
8007 }
8008 return;
8009 }
8010 }
8011
8012 }
8013
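/* Editorial usage sketch (not part of the original source): the hashing
   above is iterative, so several expressions can be folded into one
   value.  A hypothetical caller might write:

     inchash::hash hstate;
     inchash::add_expr (op0, hstate, 0);
     inchash::add_expr (op1, hstate, 0);
     hashval_t h = hstate.end ();

   Expressions that operand_equal_p (with matching flags) considers
   equal are intended to receive the same hash value.  */
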
8014 /* Constructors for pointer, array and function types.
8015 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8016 constructed by language-dependent code, not here.) */
8017
8018 /* Construct, lay out and return the type of pointers to TO_TYPE with
8019 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8020 reference all of memory. If such a type has already been
8021 constructed, reuse it. */
8022
8023 tree
8024 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8025 bool can_alias_all)
8026 {
8027 tree t;
8028 bool could_alias = can_alias_all;
8029
8030 if (to_type == error_mark_node)
8031 return error_mark_node;
8032
8033 /* If the pointed-to type has the may_alias attribute set, force
8034 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8035 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8036 can_alias_all = true;
8037
8038 /* In some cases, languages will have things that aren't a POINTER_TYPE
8039 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8040 In that case, return that type without regard to the rest of our
8041 operands.
8042
8043 ??? This is a kludge, but consistent with the way this function has
8044 always operated and there doesn't seem to be a good way to avoid this
8045 at the moment. */
8046 if (TYPE_POINTER_TO (to_type) != 0
8047 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8048 return TYPE_POINTER_TO (to_type);
8049
8050 /* First, if we already have a type for pointers to TO_TYPE and it's
8051 the proper mode, use it. */
8052 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8053 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8054 return t;
8055
8056 t = make_node (POINTER_TYPE);
8057
8058 TREE_TYPE (t) = to_type;
8059 SET_TYPE_MODE (t, mode);
8060 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8061 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8062 TYPE_POINTER_TO (to_type) = t;
8063
8064 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8065 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8066 SET_TYPE_STRUCTURAL_EQUALITY (t);
8067 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8068 TYPE_CANONICAL (t)
8069 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8070 mode, false);
8071
8072 /* Lay out the type. This function has many callers that are concerned
8073 with expression-construction, and this simplifies them all. */
8074 layout_type (t);
8075
8076 return t;
8077 }
8078
8079 /* By default build pointers in ptr_mode. */
8080
8081 tree
8082 build_pointer_type (tree to_type)
8083 {
8084 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8085 : TYPE_ADDR_SPACE (to_type);
8086 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8087 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8088 }
8089
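/* Editorial usage sketch (not part of the original source): pointer
   types are cached on TYPE_POINTER_TO, so repeated requests return the
   same node.  For example:

     tree int_ptr  = build_pointer_type (integer_type_node);
     tree int_ptr2 = build_pointer_type (integer_type_node);
     gcc_assert (int_ptr == int_ptr2);

   holds because the second call finds the first node on the
   TYPE_NEXT_PTR_TO chain with the same mode and can_alias_all.  */
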
8090 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8091
8092 tree
8093 build_reference_type_for_mode (tree to_type, machine_mode mode,
8094 bool can_alias_all)
8095 {
8096 tree t;
8097 bool could_alias = can_alias_all;
8098
8099 if (to_type == error_mark_node)
8100 return error_mark_node;
8101
8102 /* If the pointed-to type has the may_alias attribute set, force
8103 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8104 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8105 can_alias_all = true;
8106
8107 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8108 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8109 In that case, return that type without regard to the rest of our
8110 operands.
8111
8112 ??? This is a kludge, but consistent with the way this function has
8113 always operated and there doesn't seem to be a good way to avoid this
8114 at the moment. */
8115 if (TYPE_REFERENCE_TO (to_type) != 0
8116 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8117 return TYPE_REFERENCE_TO (to_type);
8118
8119 /* First, if we already have a type for references to TO_TYPE and it's
8120 the proper mode, use it. */
8121 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8122 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8123 return t;
8124
8125 t = make_node (REFERENCE_TYPE);
8126
8127 TREE_TYPE (t) = to_type;
8128 SET_TYPE_MODE (t, mode);
8129 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8130 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8131 TYPE_REFERENCE_TO (to_type) = t;
8132
8133 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8134 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8135 SET_TYPE_STRUCTURAL_EQUALITY (t);
8136 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8137 TYPE_CANONICAL (t)
8138 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8139 mode, false);
8140
8141 layout_type (t);
8142
8143 return t;
8144 }
8145
8146
8147 /* Build the node for the type of references-to-TO_TYPE by default
8148 in ptr_mode. */
8149
8150 tree
8151 build_reference_type (tree to_type)
8152 {
8153 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8154 : TYPE_ADDR_SPACE (to_type);
8155 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8156 return build_reference_type_for_mode (to_type, pointer_mode, false);
8157 }
8158
8159 #define MAX_INT_CACHED_PREC \
8160 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8161 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8162
8163 /* Builds a signed or unsigned integer type of precision PRECISION.
8164 Used for C bitfields whose precision does not match that of
8165 built-in target types. */
8166 tree
8167 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8168 int unsignedp)
8169 {
8170 tree itype, ret;
8171
8172 if (unsignedp)
8173 unsignedp = MAX_INT_CACHED_PREC + 1;
8174
8175 if (precision <= MAX_INT_CACHED_PREC)
8176 {
8177 itype = nonstandard_integer_type_cache[precision + unsignedp];
8178 if (itype)
8179 return itype;
8180 }
8181
8182 itype = make_node (INTEGER_TYPE);
8183 TYPE_PRECISION (itype) = precision;
8184
8185 if (unsignedp)
8186 fixup_unsigned_type (itype);
8187 else
8188 fixup_signed_type (itype);
8189
8190 ret = itype;
8191 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8192 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8193 if (precision <= MAX_INT_CACHED_PREC)
8194 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8195
8196 return ret;
8197 }
8198
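/* Editorial usage sketch (not part of the original source): a front end
   that needs the type of a 24-bit unsigned bit-field could ask for

     tree u24 = build_nonstandard_integer_type (24, 1);

   Results for precisions up to MAX_INT_CACHED_PREC are cached, with the
   signed and unsigned variants kept in separate cache slots.  */
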
8199 #define MAX_BOOL_CACHED_PREC \
8200 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8201 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8202
8203 /* Builds a boolean type of precision PRECISION.
8204 Used for boolean vectors to choose proper vector element size. */
8205 tree
8206 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8207 {
8208 tree type;
8209
8210 if (precision <= MAX_BOOL_CACHED_PREC)
8211 {
8212 type = nonstandard_boolean_type_cache[precision];
8213 if (type)
8214 return type;
8215 }
8216
8217 type = make_node (BOOLEAN_TYPE);
8218 TYPE_PRECISION (type) = precision;
8219 fixup_signed_type (type);
8220
8221 if (precision <= MAX_BOOL_CACHED_PREC)
8222 nonstandard_boolean_type_cache[precision] = type;
8223
8224 return type;
8225 }
8226
8227 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8228 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8229 is true, reuse such a type that has already been constructed. */
8230
8231 static tree
8232 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8233 {
8234 tree itype = make_node (INTEGER_TYPE);
8235 inchash::hash hstate;
8236
8237 TREE_TYPE (itype) = type;
8238
8239 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8240 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8241
8242 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8243 SET_TYPE_MODE (itype, TYPE_MODE (type));
8244 TYPE_SIZE (itype) = TYPE_SIZE (type);
8245 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8246 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8247 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8248
8249 if (!shared)
8250 return itype;
8251
8252 if ((TYPE_MIN_VALUE (itype)
8253 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8254 || (TYPE_MAX_VALUE (itype)
8255 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8256 {
8257 /* Since we cannot reliably merge this type, we need to compare it using
8258 structural equality checks. */
8259 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8260 return itype;
8261 }
8262
8263 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8264 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8265 hstate.merge_hash (TYPE_HASH (type));
8266 itype = type_hash_canon (hstate.end (), itype);
8267
8268 return itype;
8269 }
8270
8271 /* Wrapper around build_range_type_1 with SHARED set to true. */
8272
8273 tree
8274 build_range_type (tree type, tree lowval, tree highval)
8275 {
8276 return build_range_type_1 (type, lowval, highval, true);
8277 }
8278
8279 /* Wrapper around build_range_type_1 with SHARED set to false. */
8280
8281 tree
8282 build_nonshared_range_type (tree type, tree lowval, tree highval)
8283 {
8284 return build_range_type_1 (type, lowval, highval, false);
8285 }
8286
8287 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8288 MAXVAL should be the maximum value in the domain
8289 (one less than the length of the array).
8290
8291 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8292 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8293 The limit exists because the result is a signed type and we don't handle
8294 sizes that use more than one HOST_WIDE_INT. */
8295
8296 tree
8297 build_index_type (tree maxval)
8298 {
8299 return build_range_type (sizetype, size_zero_node, maxval);
8300 }
8301
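/* Editorial usage sketch (not part of the original source): the domain
   of a ten-element array runs from 0 to 9, so it can be built as

     tree domain = build_index_type (size_int (9));

   which is shorthand for build_range_type (sizetype, size_zero_node,
   size_int (9)).  */
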
8302 /* Return true if the debug information for TYPE, a subtype, should be emitted
8303 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8304 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8305 debug info and doesn't reflect the source code. */
8306
8307 bool
8308 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8309 {
8310 tree base_type = TREE_TYPE (type), low, high;
8311
8312 /* Subrange types have a base type which is an integral type. */
8313 if (!INTEGRAL_TYPE_P (base_type))
8314 return false;
8315
8316 /* Get the real bounds of the subtype. */
8317 if (lang_hooks.types.get_subrange_bounds)
8318 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8319 else
8320 {
8321 low = TYPE_MIN_VALUE (type);
8322 high = TYPE_MAX_VALUE (type);
8323 }
8324
8325 /* If the type and its base type have the same representation and the same
8326 name, then the type is not a subrange but a copy of the base type. */
8327 if ((TREE_CODE (base_type) == INTEGER_TYPE
8328 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8329 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8330 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8331 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8332 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8333 return false;
8334
8335 if (lowval)
8336 *lowval = low;
8337 if (highval)
8338 *highval = high;
8339 return true;
8340 }
8341
8342 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8343 and number of elements specified by the range of values of INDEX_TYPE.
8344 If SHARED is true, reuse such a type that has already been constructed. */
8345
8346 static tree
8347 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8348 {
8349 tree t;
8350
8351 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8352 {
8353 error ("arrays of functions are not meaningful");
8354 elt_type = integer_type_node;
8355 }
8356
8357 t = make_node (ARRAY_TYPE);
8358 TREE_TYPE (t) = elt_type;
8359 TYPE_DOMAIN (t) = index_type;
8360 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8361 layout_type (t);
8362
8363 /* If the element type is incomplete at this point we get marked for
8364 structural equality. Do not record these types in the canonical
8365 type hashtable. */
8366 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8367 return t;
8368
8369 if (shared)
8370 {
8371 inchash::hash hstate;
8372 hstate.add_object (TYPE_HASH (elt_type));
8373 if (index_type)
8374 hstate.add_object (TYPE_HASH (index_type));
8375 t = type_hash_canon (hstate.end (), t);
8376 }
8377
8378 if (TYPE_CANONICAL (t) == t)
8379 {
8380 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8381 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8382 || in_lto_p)
8383 SET_TYPE_STRUCTURAL_EQUALITY (t);
8384 else if (TYPE_CANONICAL (elt_type) != elt_type
8385 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8386 TYPE_CANONICAL (t)
8387 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8388 index_type
8389 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8390 shared);
8391 }
8392
8393 return t;
8394 }
8395
8396 /* Wrapper around build_array_type_1 with SHARED set to true. */
8397
8398 tree
8399 build_array_type (tree elt_type, tree index_type)
8400 {
8401 return build_array_type_1 (elt_type, index_type, true);
8402 }
8403
8404 /* Wrapper around build_array_type_1 with SHARED set to false. */
8405
8406 tree
8407 build_nonshared_array_type (tree elt_type, tree index_type)
8408 {
8409 return build_array_type_1 (elt_type, index_type, false);
8410 }
8411
8412 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8413 sizetype. */
8414
8415 tree
8416 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8417 {
8418 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8419 }
8420
8421 /* Recursively examines the array elements of TYPE, until a non-array
8422 element type is found. */
8423
8424 tree
8425 strip_array_types (tree type)
8426 {
8427 while (TREE_CODE (type) == ARRAY_TYPE)
8428 type = TREE_TYPE (type);
8429
8430 return type;
8431 }
8432
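/* Editorial usage sketch (not part of the original source): an array
   type such as int[4][10] can be built from the inside out, and
   strip_array_types recovers the element type:

     tree inner = build_array_type_nelts (integer_type_node, 10);
     tree outer = build_array_type_nelts (inner, 4);
     gcc_assert (strip_array_types (outer) == integer_type_node);  */
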
8433 /* Computes the canonical argument types from the argument type list
8434 ARGTYPES.
8435
8436 Upon return, *ANY_STRUCTURAL_P will be true iff it was true
8437 on entry to this function or if any of the ARGTYPES are
8438 structural.
8439 
8440 Upon return, *ANY_NONCANONICAL_P will be true iff it was
8441 true on entry to this function or if any of the ARGTYPES are
8442 non-canonical.
8443
8444 Returns a canonical argument list, which may be ARGTYPES when the
8445 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8446 true) or would not differ from ARGTYPES. */
8447
8448 static tree
8449 maybe_canonicalize_argtypes (tree argtypes,
8450 bool *any_structural_p,
8451 bool *any_noncanonical_p)
8452 {
8453 tree arg;
8454 bool any_noncanonical_argtypes_p = false;
8455
8456 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8457 {
8458 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8459 /* Fail gracefully by stating that the type is structural. */
8460 *any_structural_p = true;
8461 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8462 *any_structural_p = true;
8463 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8464 || TREE_PURPOSE (arg))
8465 /* If the argument has a default argument, we consider it
8466 non-canonical even though the type itself is canonical.
8467 That way, different variants of function and method types
8468 with default arguments will all point to the variant with
8469 no defaults as their canonical type. */
8470 any_noncanonical_argtypes_p = true;
8471 }
8472
8473 if (*any_structural_p)
8474 return argtypes;
8475
8476 if (any_noncanonical_argtypes_p)
8477 {
8478 /* Build the canonical list of argument types. */
8479 tree canon_argtypes = NULL_TREE;
8480 bool is_void = false;
8481
8482 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8483 {
8484 if (arg == void_list_node)
8485 is_void = true;
8486 else
8487 canon_argtypes = tree_cons (NULL_TREE,
8488 TYPE_CANONICAL (TREE_VALUE (arg)),
8489 canon_argtypes);
8490 }
8491
8492 canon_argtypes = nreverse (canon_argtypes);
8493 if (is_void)
8494 canon_argtypes = chainon (canon_argtypes, void_list_node);
8495
8496 /* There is a non-canonical type. */
8497 *any_noncanonical_p = true;
8498 return canon_argtypes;
8499 }
8500
8501 /* The canonical argument types are the same as ARGTYPES. */
8502 return argtypes;
8503 }
8504
8505 /* Construct, lay out and return
8506 the type of functions returning type VALUE_TYPE
8507 given arguments of types ARG_TYPES.
8508 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8509 are data type nodes for the arguments of the function.
8510 If such a type has already been constructed, reuse it. */
8511
8512 tree
8513 build_function_type (tree value_type, tree arg_types)
8514 {
8515 tree t;
8516 inchash::hash hstate;
8517 bool any_structural_p, any_noncanonical_p;
8518 tree canon_argtypes;
8519
8520 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8521 {
8522 error ("function return type cannot be function");
8523 value_type = integer_type_node;
8524 }
8525
8526 /* Make a node of the sort we want. */
8527 t = make_node (FUNCTION_TYPE);
8528 TREE_TYPE (t) = value_type;
8529 TYPE_ARG_TYPES (t) = arg_types;
8530
8531 /* If we already have such a type, use the old one. */
8532 hstate.add_object (TYPE_HASH (value_type));
8533 type_hash_list (arg_types, hstate);
8534 t = type_hash_canon (hstate.end (), t);
8535
8536 /* Set up the canonical type. */
8537 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8538 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8539 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8540 &any_structural_p,
8541 &any_noncanonical_p);
8542 if (any_structural_p)
8543 SET_TYPE_STRUCTURAL_EQUALITY (t);
8544 else if (any_noncanonical_p)
8545 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8546 canon_argtypes);
8547
8548 if (!COMPLETE_TYPE_P (t))
8549 layout_type (t);
8550 return t;
8551 }
8552
8553 /* Build a function type. The RETURN_TYPE is the type returned by the
8554 function. If VAARGS is set, no void_type_node is appended to the
8555 list. ARGP must always be terminated by a NULL_TREE. */
8556
8557 static tree
8558 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8559 {
8560 tree t, args, last;
8561
8562 t = va_arg (argp, tree);
8563 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8564 args = tree_cons (NULL_TREE, t, args);
8565
8566 if (vaargs)
8567 {
8568 last = args;
8569 if (args != NULL_TREE)
8570 args = nreverse (args);
8571 gcc_assert (last != void_list_node);
8572 }
8573 else if (args == NULL_TREE)
8574 args = void_list_node;
8575 else
8576 {
8577 last = args;
8578 args = nreverse (args);
8579 TREE_CHAIN (last) = void_list_node;
8580 }
8581 args = build_function_type (return_type, args);
8582
8583 return args;
8584 }
8585
8586 /* Build a function type. The RETURN_TYPE is the type returned by the
8587 function. If additional arguments are provided, they are
8588 additional argument types. The list of argument types must always
8589 be terminated by NULL_TREE. */
8590
8591 tree
8592 build_function_type_list (tree return_type, ...)
8593 {
8594 tree args;
8595 va_list p;
8596
8597 va_start (p, return_type);
8598 args = build_function_type_list_1 (false, return_type, p);
8599 va_end (p);
8600 return args;
8601 }
8602
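/* Editorial usage sketch (not part of the original source): the type of
   a function "int f (double, void *)" can be requested with

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node,
                                             ptr_type_node,
                                             NULL_TREE);

   The trailing NULL_TREE terminates the argument list; because this is
   the non-varargs entry point, void_list_node is appended so the type
   is a prototype.  */
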
8603 /* Build a variable argument function type. The RETURN_TYPE is the
8604 type returned by the function. If additional arguments are provided,
8605 they are additional argument types. The list of argument types must
8606 always be terminated by NULL_TREE. */
8607
8608 tree
8609 build_varargs_function_type_list (tree return_type, ...)
8610 {
8611 tree args;
8612 va_list p;
8613
8614 va_start (p, return_type);
8615 args = build_function_type_list_1 (true, return_type, p);
8616 va_end (p);
8617
8618 return args;
8619 }
8620
8621 /* Build a function type. RETURN_TYPE is the type returned by the
8622 function; VAARGS indicates whether the function takes varargs. The
8623 function takes N named arguments, the types of which are provided in
8624 ARG_TYPES. */
8625
8626 static tree
8627 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8628 tree *arg_types)
8629 {
8630 int i;
8631 tree t = vaargs ? NULL_TREE : void_list_node;
8632
8633 for (i = n - 1; i >= 0; i--)
8634 t = tree_cons (NULL_TREE, arg_types[i], t);
8635
8636 return build_function_type (return_type, t);
8637 }
8638
8639 /* Build a function type. RETURN_TYPE is the type returned by the
8640 function. The function takes N named arguments, the types of which
8641 are provided in ARG_TYPES. */
8642
8643 tree
8644 build_function_type_array (tree return_type, int n, tree *arg_types)
8645 {
8646 return build_function_type_array_1 (false, return_type, n, arg_types);
8647 }
8648
8649 /* Build a variable argument function type. RETURN_TYPE is the type
8650 returned by the function. The function takes N named arguments, the
8651 types of which are provided in ARG_TYPES. */
8652
8653 tree
8654 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8655 {
8656 return build_function_type_array_1 (true, return_type, n, arg_types);
8657 }
8658
8659 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8660 and ARGTYPES (a TREE_LIST) are the return type and argument types
8661 for the method. An implicit additional parameter (of type
8662 pointer-to-BASETYPE) is added to the ARGTYPES. */
8663
8664 tree
8665 build_method_type_directly (tree basetype,
8666 tree rettype,
8667 tree argtypes)
8668 {
8669 tree t;
8670 tree ptype;
8671 inchash::hash hstate;
8672 bool any_structural_p, any_noncanonical_p;
8673 tree canon_argtypes;
8674
8675 /* Make a node of the sort we want. */
8676 t = make_node (METHOD_TYPE);
8677
8678 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8679 TREE_TYPE (t) = rettype;
8680 ptype = build_pointer_type (basetype);
8681
8682 /* The actual arglist for this function includes a "hidden" argument
8683 which is "this". Put it into the list of argument types. */
8684 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8685 TYPE_ARG_TYPES (t) = argtypes;
8686
8687 /* If we already have such a type, use the old one. */
8688 hstate.add_object (TYPE_HASH (basetype));
8689 hstate.add_object (TYPE_HASH (rettype));
8690 type_hash_list (argtypes, hstate);
8691 t = type_hash_canon (hstate.end (), t);
8692
8693 /* Set up the canonical type. */
8694 any_structural_p
8695 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8696 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8697 any_noncanonical_p
8698 = (TYPE_CANONICAL (basetype) != basetype
8699 || TYPE_CANONICAL (rettype) != rettype);
8700 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8701 &any_structural_p,
8702 &any_noncanonical_p);
8703 if (any_structural_p)
8704 SET_TYPE_STRUCTURAL_EQUALITY (t);
8705 else if (any_noncanonical_p)
8706 TYPE_CANONICAL (t)
8707 = build_method_type_directly (TYPE_CANONICAL (basetype),
8708 TYPE_CANONICAL (rettype),
8709 canon_argtypes);
8710 if (!COMPLETE_TYPE_P (t))
8711 layout_type (t);
8712
8713 return t;
8714 }
8715
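/* Editorial usage sketch (not part of the original source): for a
   hypothetical class type CLASS_TYPE, the METHOD_TYPE of
   "int CLASS::f (double)" could be built as

     tree argtypes = tree_cons (NULL_TREE, double_type_node,
                                void_list_node);
     tree mtype = build_method_type_directly (class_type,
                                              integer_type_node,
                                              argtypes);

   where class_type stands for an existing RECORD_TYPE.  The implicit
   "this" argument (pointer to CLASS_TYPE) is prepended to the argument
   list by the function itself.  */
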
8716 /* Construct, lay out and return the type of methods belonging to class
8717 BASETYPE and whose arguments and values are described by TYPE.
8718 If that type exists already, reuse it.
8719 TYPE must be a FUNCTION_TYPE node. */
8720
8721 tree
8722 build_method_type (tree basetype, tree type)
8723 {
8724 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8725
8726 return build_method_type_directly (basetype,
8727 TREE_TYPE (type),
8728 TYPE_ARG_TYPES (type));
8729 }
8730
8731 /* Construct, lay out and return the type of offsets to a value
8732 of type TYPE, within an object of type BASETYPE.
8733 If a suitable offset type exists already, reuse it. */
8734
8735 tree
8736 build_offset_type (tree basetype, tree type)
8737 {
8738 tree t;
8739 inchash::hash hstate;
8740
8741 /* Make a node of the sort we want. */
8742 t = make_node (OFFSET_TYPE);
8743
8744 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8745 TREE_TYPE (t) = type;
8746
8747 /* If we already have such a type, use the old one. */
8748 hstate.add_object (TYPE_HASH (basetype));
8749 hstate.add_object (TYPE_HASH (type));
8750 t = type_hash_canon (hstate.end (), t);
8751
8752 if (!COMPLETE_TYPE_P (t))
8753 layout_type (t);
8754
8755 if (TYPE_CANONICAL (t) == t)
8756 {
8757 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8758 || TYPE_STRUCTURAL_EQUALITY_P (type))
8759 SET_TYPE_STRUCTURAL_EQUALITY (t);
8760 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8761 || TYPE_CANONICAL (type) != type)
8762 TYPE_CANONICAL (t)
8763 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8764 TYPE_CANONICAL (type));
8765 }
8766
8767 return t;
8768 }
8769
8770 /* Create a complex type whose components are COMPONENT_TYPE. */
8771
8772 tree
8773 build_complex_type (tree component_type)
8774 {
8775 tree t;
8776 inchash::hash hstate;
8777
8778 gcc_assert (INTEGRAL_TYPE_P (component_type)
8779 || SCALAR_FLOAT_TYPE_P (component_type)
8780 || FIXED_POINT_TYPE_P (component_type));
8781
8782 /* Make a node of the sort we want. */
8783 t = make_node (COMPLEX_TYPE);
8784
8785 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8786
8787 /* If we already have such a type, use the old one. */
8788 hstate.add_object (TYPE_HASH (component_type));
8789 t = type_hash_canon (hstate.end (), t);
8790
8791 if (!COMPLETE_TYPE_P (t))
8792 layout_type (t);
8793
8794 if (TYPE_CANONICAL (t) == t)
8795 {
8796 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8797 SET_TYPE_STRUCTURAL_EQUALITY (t);
8798 else if (TYPE_CANONICAL (component_type) != component_type)
8799 TYPE_CANONICAL (t)
8800 = build_complex_type (TYPE_CANONICAL (component_type));
8801 }
8802
8803 /* We need to create a name, since complex is a fundamental type. */
8804 if (! TYPE_NAME (t))
8805 {
8806 const char *name;
8807 if (component_type == char_type_node)
8808 name = "complex char";
8809 else if (component_type == signed_char_type_node)
8810 name = "complex signed char";
8811 else if (component_type == unsigned_char_type_node)
8812 name = "complex unsigned char";
8813 else if (component_type == short_integer_type_node)
8814 name = "complex short int";
8815 else if (component_type == short_unsigned_type_node)
8816 name = "complex short unsigned int";
8817 else if (component_type == integer_type_node)
8818 name = "complex int";
8819 else if (component_type == unsigned_type_node)
8820 name = "complex unsigned int";
8821 else if (component_type == long_integer_type_node)
8822 name = "complex long int";
8823 else if (component_type == long_unsigned_type_node)
8824 name = "complex long unsigned int";
8825 else if (component_type == long_long_integer_type_node)
8826 name = "complex long long int";
8827 else if (component_type == long_long_unsigned_type_node)
8828 name = "complex long long unsigned int";
8829 else
8830 name = 0;
8831
8832 if (name != 0)
8833 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8834 get_identifier (name), t);
8835 }
8836
8837 return build_qualified_type (t, TYPE_QUALS (component_type));
8838 }
8839
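/* Editorial usage sketch (not part of the original source): a complex
   type over a standard integer component gets a synthesized name, e.g.

     tree cint = build_complex_type (integer_type_node);

   yields a COMPLEX_TYPE whose TYPE_NAME is the TYPE_DECL "complex int",
   with the component type's qualifiers propagated by the final
   build_qualified_type call.  */
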
8840 /* If TYPE is a real or complex floating-point type and the target
8841 does not directly support arithmetic on TYPE then return the wider
8842 type to be used for arithmetic on TYPE. Otherwise, return
8843 NULL_TREE. */
8844
8845 tree
8846 excess_precision_type (tree type)
8847 {
8848 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8849 {
8850 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8851 switch (TREE_CODE (type))
8852 {
8853 case REAL_TYPE:
8854 switch (flt_eval_method)
8855 {
8856 case 1:
8857 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8858 return double_type_node;
8859 break;
8860 case 2:
8861 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8862 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8863 return long_double_type_node;
8864 break;
8865 default:
8866 gcc_unreachable ();
8867 }
8868 break;
8869 case COMPLEX_TYPE:
8870 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8871 return NULL_TREE;
8872 switch (flt_eval_method)
8873 {
8874 case 1:
8875 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8876 return complex_double_type_node;
8877 break;
8878 case 2:
8879 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8880 || (TYPE_MODE (TREE_TYPE (type))
8881 == TYPE_MODE (double_type_node)))
8882 return complex_long_double_type_node;
8883 break;
8884 default:
8885 gcc_unreachable ();
8886 }
8887 break;
8888 default:
8889 break;
8890 }
8891 }
8892 return NULL_TREE;
8893 }
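
/* Editorial illustration (not part of the original source): with
   -fexcess-precision=standard on a target whose FLT_EVAL_METHOD is 2
   (e.g. x87 floating point), the function above returns

     excess_precision_type (float_type_node)   => long_double_type_node
     excess_precision_type (double_type_node)  => long_double_type_node

   while with -fexcess-precision=fast it always returns NULL_TREE.  */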
8894 \f
8895 /* Return OP, stripped of any conversions to wider types as much as is safe.
8896 Converting the value back to OP's type makes a value equivalent to OP.
8897
8898 If FOR_TYPE is nonzero, we return a value which, if converted to
8899 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8900
8901 OP must have integer, real or enumeral type. Pointers are not allowed!
8902
8903 There are some cases where the obvious value we could return
8904 would regenerate to OP if converted to OP's type,
8905 but would not extend like OP to wider types.
8906 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8907 For example, if OP is (unsigned short)(signed char)-1,
8908 we avoid returning (signed char)-1 if FOR_TYPE is int,
8909 even though extending that to an unsigned short would regenerate OP,
8910 since the result of extending (signed char)-1 to (int)
8911 is different from (int) OP. */
8912
8913 tree
8914 get_unwidened (tree op, tree for_type)
8915 {
8916 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8917 tree type = TREE_TYPE (op);
8918 unsigned final_prec
8919 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8920 int uns
8921 = (for_type != 0 && for_type != type
8922 && final_prec > TYPE_PRECISION (type)
8923 && TYPE_UNSIGNED (type));
8924 tree win = op;
8925
8926 while (CONVERT_EXPR_P (op))
8927 {
8928 int bitschange;
8929
8930 /* TYPE_PRECISION on vector types has different meaning
8931 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8932 so avoid them here. */
8933 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8934 break;
8935
8936 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8937 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8938
8939 /* Truncations are many-one so cannot be removed,
8940 unless we are later going to truncate down even further. */
8941 if (bitschange < 0
8942 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8943 break;
8944
8945 /* See what's inside this conversion. If we decide to strip it,
8946 we will set WIN. */
8947 op = TREE_OPERAND (op, 0);
8948
8949 /* If we have not stripped any zero-extensions (uns is 0),
8950 we can strip any kind of extension.
8951 If we have previously stripped a zero-extension,
8952 only zero-extensions can safely be stripped.
8953 Any extension can be stripped if the bits it would produce
8954 are all going to be discarded later by truncating to FOR_TYPE. */
8955
8956 if (bitschange > 0)
8957 {
8958 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8959 win = op;
8960 /* TYPE_UNSIGNED says whether this is a zero-extension.
8961 Let's avoid computing it if it does not affect WIN
8962 and if UNS will not be needed again. */
8963 if ((uns
8964 || CONVERT_EXPR_P (op))
8965 && TYPE_UNSIGNED (TREE_TYPE (op)))
8966 {
8967 uns = 1;
8968 win = op;
8969 }
8970 }
8971 }
8972
8973 /* If we finally reach a constant see if it fits in for_type and
8974 in that case convert it. */
8975 if (for_type
8976 && TREE_CODE (win) == INTEGER_CST
8977 && TREE_TYPE (win) != for_type
8978 && int_fits_type_p (win, for_type))
8979 win = fold_convert (for_type, win);
8980
8981 return win;
8982 }
8983 \f
8984 /* Return OP or a simpler expression for a narrower value
8985 which can be sign-extended or zero-extended to give back OP.
8986 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8987 or 0 if the value should be sign-extended. */
8988
8989 tree
8990 get_narrower (tree op, int *unsignedp_ptr)
8991 {
8992 int uns = 0;
8993 int first = 1;
8994 tree win = op;
8995 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8996
8997 while (TREE_CODE (op) == NOP_EXPR)
8998 {
8999 int bitschange
9000 = (TYPE_PRECISION (TREE_TYPE (op))
9001 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9002
9003 /* Truncations are many-one so cannot be removed. */
9004 if (bitschange < 0)
9005 break;
9006
9007 /* See what's inside this conversion. If we decide to strip it,
9008 we will set WIN. */
9009
9010 if (bitschange > 0)
9011 {
9012 op = TREE_OPERAND (op, 0);
9013 /* An extension: the outermost one can be stripped,
9014 but remember whether it is zero or sign extension. */
9015 if (first)
9016 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9017 /* Otherwise, if a sign extension has been stripped,
9018 only sign extensions can now be stripped;
9019 if a zero extension has been stripped, only zero-extensions. */
9020 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9021 break;
9022 first = 0;
9023 }
9024 else /* bitschange == 0 */
9025 {
9026 /* A change in nominal type can always be stripped, but we must
9027 preserve the unsignedness. */
9028 if (first)
9029 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9030 first = 0;
9031 op = TREE_OPERAND (op, 0);
9032 /* Keep trying to narrow, but don't assign op to win if it
9033 would turn an integral type into something else. */
9034 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9035 continue;
9036 }
9037
9038 win = op;
9039 }
9040
9041 if (TREE_CODE (op) == COMPONENT_REF
9042 /* Since type_for_size always gives an integer type. */
9043 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9044 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9045 /* Ensure field is laid out already. */
9046 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9047 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9048 {
9049 unsigned HOST_WIDE_INT innerprec
9050 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9051 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9052 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9053 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9054
9055 /* We can get this structure field in a narrower type that fits it,
9056 but the resulting extension to its nominal type (a fullword type)
9057 must satisfy the same conditions as for other extensions.
9058
9059 Do this only for fields that are aligned (not bit-fields),
9060 because when bit-field insns would be used anyway, there is no
9061 advantage in doing this. */
9062
9063 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9064 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9065 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9066 && type != 0)
9067 {
9068 if (first)
9069 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9070 win = fold_convert (type, op);
9071 }
9072 }
9073
9074 *unsignedp_ptr = uns;
9075 return win;
9076 }
9077 \f
9078 /* Returns true if integer constant C has a value that is permissible
9079 for type TYPE (an INTEGER_TYPE). */
9080
9081 bool
9082 int_fits_type_p (const_tree c, const_tree type)
9083 {
9084 tree type_low_bound, type_high_bound;
9085 bool ok_for_low_bound, ok_for_high_bound;
9086 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9087
9088 retry:
9089 type_low_bound = TYPE_MIN_VALUE (type);
9090 type_high_bound = TYPE_MAX_VALUE (type);
9091
9092 /* If at least one bound of the type is a constant integer, we can check
9093 ourselves and maybe make a decision. If no such decision is possible, but
9094 this type is a subtype, try checking against that. Otherwise, use
9095 fits_to_tree_p, which checks against the precision.
9096
9097 Compute the status for each possibly constant bound, and return if we see
9098 one does not match. Use ok_for_xxx_bound to record whether the
9099 corresponding bound is a constant integer that C is known to satisfy;
9100 it stays false when that bound is missing or not constant. */
9101
9102 /* Check if c >= type_low_bound. */
9103 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9104 {
9105 if (tree_int_cst_lt (c, type_low_bound))
9106 return false;
9107 ok_for_low_bound = true;
9108 }
9109 else
9110 ok_for_low_bound = false;
9111
9112 /* Check if c <= type_high_bound. */
9113 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9114 {
9115 if (tree_int_cst_lt (type_high_bound, c))
9116 return false;
9117 ok_for_high_bound = true;
9118 }
9119 else
9120 ok_for_high_bound = false;
9121
9122 /* If the constant fits both bounds, the result is known. */
9123 if (ok_for_low_bound && ok_for_high_bound)
9124 return true;
9125
9126 /* Perform some generic filtering which may allow making a decision
9127 even if the bounds are not constant. First, negative integers
9128 never fit in unsigned types. */
9129 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9130 return false;
9131
9132 /* Second, narrower types always fit in wider ones. */
9133 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9134 return true;
9135
9136 /* Third, unsigned integers with top bit set never fit signed types. */
9137 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9138 {
9139 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9140 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9141 {
9142 /* When a tree_cst is converted to a wide-int, the precision
9143 is taken from the type. However, if the precision of the
9144 mode underneath the type is smaller than that, it is
9145 possible that the value will not fit. The test below
9146 fails if any bit is set between the sign bit of the
9147 underlying mode and the top bit of the type. */
9148 if (wi::ne_p (wi::zext (c, prec - 1), c))
9149 return false;
9150 }
9151 else if (wi::neg_p (c))
9152 return false;
9153 }
9154
9155 /* If we haven't been able to decide at this point, there is nothing more we
9156 can check ourselves here. Look at the base type if we have one and it
9157 has the same precision. */
9158 if (TREE_CODE (type) == INTEGER_TYPE
9159 && TREE_TYPE (type) != 0
9160 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9161 {
9162 type = TREE_TYPE (type);
9163 goto retry;
9164 }
9165
9166 /* Or to fits_to_tree_p, if nothing else. */
9167 return wi::fits_to_tree_p (c, type);
9168 }
9169
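/* Editorial illustration (not part of the original source): it is the
   value of the constant, not its type, that is checked.  For instance:

     tree c = build_int_cst (integer_type_node, 300);
     int_fits_type_p (c, signed_char_type_node)     => false
     int_fits_type_p (build_int_cst (integer_type_node, 100),
                      signed_char_type_node)        => true  */
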
9170 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9171 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9172 represented (assuming two's-complement arithmetic) within the bit
9173 precision of the type are returned instead. */
9174
9175 void
9176 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9177 {
9178 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9179 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9180 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9181 else
9182 {
9183 if (TYPE_UNSIGNED (type))
9184 mpz_set_ui (min, 0);
9185 else
9186 {
9187 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9188 wi::to_mpz (mn, min, SIGNED);
9189 }
9190 }
9191
9192 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9193 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9194 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9195 else
9196 {
9197 wide_int mx = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9198 wi::to_mpz (mx, max, TYPE_SIGN (type));
9199 }
9200 }
9201
9202 /* Return true if VAR is an automatic variable defined in function FN. */
9203
9204 bool
9205 auto_var_in_fn_p (const_tree var, const_tree fn)
9206 {
9207 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9208 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9209 || TREE_CODE (var) == PARM_DECL)
9210 && ! TREE_STATIC (var))
9211 || TREE_CODE (var) == LABEL_DECL
9212 || TREE_CODE (var) == RESULT_DECL));
9213 }
9214
9215 /* Subprogram of following function. Called by walk_tree.
9216
9217 Return *TP if it is an automatic variable or parameter of the
9218 function passed in as DATA. */
9219
9220 static tree
9221 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9222 {
9223 tree fn = (tree) data;
9224
9225 if (TYPE_P (*tp))
9226 *walk_subtrees = 0;
9227
9228 else if (DECL_P (*tp)
9229 && auto_var_in_fn_p (*tp, fn))
9230 return *tp;
9231
9232 return NULL_TREE;
9233 }
9234
9235 /* Returns true if T is, contains, or refers to a type with variable
9236 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9237 arguments, but not the return type. If FN is nonzero, only return
9238 true if a modifier of the type or position of FN is a variable or
9239 parameter inside FN.
9240
9241 This concept is more general than that of C99 'variably modified types':
9242 in C99, a struct type is never variably modified because a VLA may not
9243 appear as a structure member. However, in GNU C, code like:
9244
9245 struct S { int i[f()]; };
9246
9247 is valid, and other languages may define similar constructs. */
9248
9249 bool
9250 variably_modified_type_p (tree type, tree fn)
9251 {
9252 tree t;
9253
9254 /* Test if T is either variable (if FN is zero) or an expression containing
9255 a variable in FN. If TYPE isn't gimplified, return true also if
9256 gimplify_one_sizepos would gimplify the expression into a local
9257 variable. */
9258 #define RETURN_TRUE_IF_VAR(T) \
9259 do { tree _t = (T); \
9260 if (_t != NULL_TREE \
9261 && _t != error_mark_node \
9262 && TREE_CODE (_t) != INTEGER_CST \
9263 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9264 && (!fn \
9265 || (!TYPE_SIZES_GIMPLIFIED (type) \
9266 && !is_gimple_sizepos (_t)) \
9267 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9268 return true; } while (0)
9269
9270 if (type == error_mark_node)
9271 return false;
9272
9273 /* If TYPE itself has variable size, it is variably modified. */
9274 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9275 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9276
9277 switch (TREE_CODE (type))
9278 {
9279 case POINTER_TYPE:
9280 case REFERENCE_TYPE:
9281 case VECTOR_TYPE:
9282 if (variably_modified_type_p (TREE_TYPE (type), fn))
9283 return true;
9284 break;
9285
9286 case FUNCTION_TYPE:
9287 case METHOD_TYPE:
9288 /* If TYPE is a function type, it is variably modified if the
9289 return type is variably modified. */
9290 if (variably_modified_type_p (TREE_TYPE (type), fn))
9291 return true;
9292 break;
9293
9294 case INTEGER_TYPE:
9295 case REAL_TYPE:
9296 case FIXED_POINT_TYPE:
9297 case ENUMERAL_TYPE:
9298 case BOOLEAN_TYPE:
9299 /* Scalar types are variably modified if their end points
9300 aren't constant. */
9301 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9302 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9303 break;
9304
9305 case RECORD_TYPE:
9306 case UNION_TYPE:
9307 case QUAL_UNION_TYPE:
9308 /* We can't see if any of the fields are variably-modified by the
9309 definition we normally use, since that would produce infinite
9310 recursion via pointers. */
9311 /* This is variably modified if some field's type is. */
9312 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9313 if (TREE_CODE (t) == FIELD_DECL)
9314 {
9315 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9316 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9317 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9318
9319 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9320 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9321 }
9322 break;
9323
9324 case ARRAY_TYPE:
9325 /* Do not call ourselves to avoid infinite recursion. This is
9326 variably modified if the element type is. */
9327 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9328 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9329 break;
9330
9331 default:
9332 break;
9333 }
9334
9335 /* The current language may have other cases to check, but in general,
9336 all other types are not variably modified. */
9337 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9338
9339 #undef RETURN_TRUE_IF_VAR
9340 }
9341
9342 /* Given a DECL or TYPE, return the scope in which it was declared, or
9343 NULL_TREE if there is no containing scope. */
9344
9345 tree
9346 get_containing_scope (const_tree t)
9347 {
9348 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9349 }
9350
9351 /* Return the innermost context enclosing DECL that is
9352 a FUNCTION_DECL, or zero if none. */
9353
9354 tree
9355 decl_function_context (const_tree decl)
9356 {
9357 tree context;
9358
9359 if (TREE_CODE (decl) == ERROR_MARK)
9360 return 0;
9361
9362 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9363 where we look up the function at runtime. Such functions always take
9364 a first argument of type 'pointer to real context'.
9365
9366 C++ should really be fixed to use DECL_CONTEXT for the real context,
9367 and use something else for the "virtual context". */
9368 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9369 context
9370 = TYPE_MAIN_VARIANT
9371 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9372 else
9373 context = DECL_CONTEXT (decl);
9374
9375 while (context && TREE_CODE (context) != FUNCTION_DECL)
9376 {
9377 if (TREE_CODE (context) == BLOCK)
9378 context = BLOCK_SUPERCONTEXT (context);
9379 else
9380 context = get_containing_scope (context);
9381 }
9382
9383 return context;
9384 }
9385
9386 /* Return the innermost context enclosing DECL that is
9387 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9388 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9389
9390 tree
9391 decl_type_context (const_tree decl)
9392 {
9393 tree context = DECL_CONTEXT (decl);
9394
9395 while (context)
9396 switch (TREE_CODE (context))
9397 {
9398 case NAMESPACE_DECL:
9399 case TRANSLATION_UNIT_DECL:
9400 return NULL_TREE;
9401
9402 case RECORD_TYPE:
9403 case UNION_TYPE:
9404 case QUAL_UNION_TYPE:
9405 return context;
9406
9407 case TYPE_DECL:
9408 case FUNCTION_DECL:
9409 context = DECL_CONTEXT (context);
9410 break;
9411
9412 case BLOCK:
9413 context = BLOCK_SUPERCONTEXT (context);
9414 break;
9415
9416 default:
9417 gcc_unreachable ();
9418 }
9419
9420 return NULL_TREE;
9421 }
9422
9423 /* CALL is a CALL_EXPR. Return the declaration for the function
9424 called, or NULL_TREE if the called function cannot be
9425 determined. */
9426
9427 tree
9428 get_callee_fndecl (const_tree call)
9429 {
9430 tree addr;
9431
9432 if (call == error_mark_node)
9433 return error_mark_node;
9434
9435 /* It's invalid to call this function with anything but a
9436 CALL_EXPR. */
9437 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9438
9439 /* The first operand to the CALL is the address of the function
9440 called. */
9441 addr = CALL_EXPR_FN (call);
9442
9443 /* If there is no function, return early. */
9444 if (addr == NULL_TREE)
9445 return NULL_TREE;
9446
9447 STRIP_NOPS (addr);
9448
9449 /* If this is a readonly function pointer, extract its initial value. */
9450 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9451 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9452 && DECL_INITIAL (addr))
9453 addr = DECL_INITIAL (addr);
9454
9455 /* If the address is just `&f' for some function `f', then we know
9456 that `f' is being called. */
9457 if (TREE_CODE (addr) == ADDR_EXPR
9458 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9459 return TREE_OPERAND (addr, 0);
9460
9461 /* We couldn't figure out what was being called. */
9462 return NULL_TREE;
9463 }
9464
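/* Editorial illustration (not part of the original source): for a call
   built directly from a FUNCTION_DECL, e.g.

     tree call = build_call_expr (some_fndecl, 0);
     gcc_assert (get_callee_fndecl (call) == some_fndecl);

   the CALL_EXPR_FN operand is an ADDR_EXPR of the decl, so the decl is
   recovered; a call through an arbitrary function pointer makes
   get_callee_fndecl return NULL_TREE instead.  (some_fndecl is a
   placeholder for an existing FUNCTION_DECL.)  */
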
9465 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9466 return the associated function code, otherwise return CFN_LAST. */
9467
9468 combined_fn
9469 get_call_combined_fn (const_tree call)
9470 {
9471 /* It's invalid to call this function with anything but a CALL_EXPR. */
9472 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9473
9474 if (!CALL_EXPR_FN (call))
9475 return as_combined_fn (CALL_EXPR_IFN (call));
9476
9477 tree fndecl = get_callee_fndecl (call);
9478 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9479 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9480
9481 return CFN_LAST;
9482 }
9483
9484 #define TREE_MEM_USAGE_SPACES 40
9485
9486 /* Print debugging information about tree nodes generated during the compile,
9487 and any language-specific information. */
9488
9489 void
9490 dump_tree_statistics (void)
9491 {
9492 if (GATHER_STATISTICS)
9493 {
9494 int i;
9495 int total_nodes, total_bytes;
9496 fprintf (stderr, "\nKind Nodes Bytes\n");
9497 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9498 total_nodes = total_bytes = 0;
9499 for (i = 0; i < (int) all_kinds; i++)
9500 {
9501 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9502 tree_node_counts[i], tree_node_sizes[i]);
9503 total_nodes += tree_node_counts[i];
9504 total_bytes += tree_node_sizes[i];
9505 }
9506 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9507 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9508 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9509 fprintf (stderr, "Code Nodes\n");
9510 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9511 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9512 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9513 tree_code_counts[i]);
9514 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9515 fprintf (stderr, "\n");
9516 ssanames_print_statistics ();
9517 fprintf (stderr, "\n");
9518 phinodes_print_statistics ();
9519 fprintf (stderr, "\n");
9520 }
9521 else
9522 fprintf (stderr, "(No per-node statistics)\n");
9523
9524 print_type_hash_statistics ();
9525 print_debug_expr_statistics ();
9526 print_value_expr_statistics ();
9527 lang_hooks.print_statistics ();
9528 }
9529 \f
9530 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9531
9532 /* Fold the high-order BITS bits of VALUE into the crc32 checksum CHKSUM. */
9533
9534 static unsigned
9535 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9536 {
9537 unsigned ix;
9538
9539 for (ix = bits; ix--; value <<= 1)
9540 {
9541 unsigned feedback;
9542
9543 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9544 chksum <<= 1;
9545 chksum ^= feedback;
9546 }
9547 return chksum;
9548 }
9549
9550 /* Generate a crc32 of a 32-bit unsigned. */
9551
9552 unsigned
9553 crc32_unsigned (unsigned chksum, unsigned value)
9554 {
9555 return crc32_unsigned_bits (chksum, value, 32);
9556 }
9557
9558 /* Generate a crc32 of a byte. */
9559
9560 unsigned
9561 crc32_byte (unsigned chksum, char byte)
9562 {
9563 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9564 }
9565
9566 /* Generate a crc32 of a string. */
9567
9568 unsigned
9569 crc32_string (unsigned chksum, const char *string)
9570 {
9571 do
9572 {
9573 chksum = crc32_byte (chksum, *string);
9574 }
9575 while (*string++);
9576 return chksum;
9577 }
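
/* Illustrative example (a sketch): crc32_string folds every byte of its
 argument, including the terminating NUL, into the running checksum, so
 unsigned chk = crc32_string (0, "tree");
 is equivalent to four crc32_byte calls for 't', 'r', 'e', 'e' followed
 by one for '\0'. */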
9578
9579 /* P is a string that will be used in a symbol. Mask out any characters
9580 that are not valid in that context. */
9581
9582 void
9583 clean_symbol_name (char *p)
9584 {
9585 for (; *p; p++)
9586 if (! (ISALNUM (*p)
9587 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9588 || *p == '$'
9589 #endif
9590 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9591 || *p == '.'
9592 #endif
9593 ))
9594 *p = '_';
9595 }
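
/* Illustrative example (a sketch): given the buffer "foo-bar.c",
 clean_symbol_name rewrites it to "foo_bar.c" on targets where `.' is
 valid in labels and to "foo_bar_c" where it is not; `-' is never valid
 in a symbol and is always replaced by `_'. */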
9596
9597 /* For anonymous aggregate types, we need some sort of name to hold on to;
9598 return true if ID_NODE is such a generated anonymous-aggregate name.
9599 In practice these should not appear, but it should not be harmful if they do. */
9600 bool
9601 anon_aggrname_p (const_tree id_node)
9602 {
9603 #ifndef NO_DOT_IN_LABEL
9604 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9605 && IDENTIFIER_POINTER (id_node)[1] == '_');
9606 #else /* NO_DOT_IN_LABEL */
9607 #ifndef NO_DOLLAR_IN_LABEL
9608 return (IDENTIFIER_POINTER (id_node)[0] == '$'
9609 && IDENTIFIER_POINTER (id_node)[1] == '_');
9610 #else /* NO_DOLLAR_IN_LABEL */
9611 #define ANON_AGGRNAME_PREFIX "__anon_"
9612 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9613 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9614 #endif /* NO_DOLLAR_IN_LABEL */
9615 #endif /* NO_DOT_IN_LABEL */
9616 }
9617
9618 /* Return a format for an anonymous aggregate name. */
9619 const char *
9620 anon_aggrname_format ()
9621 {
9622 #ifndef NO_DOT_IN_LABEL
9623 return "._%d";
9624 #else /* NO_DOT_IN_LABEL */
9625 #ifndef NO_DOLLAR_IN_LABEL
9626 return "$_%d";
9627 #else /* NO_DOLLAR_IN_LABEL */
9628 return "__anon_%d";
9629 #endif /* NO_DOLLAR_IN_LABEL */
9630 #endif /* NO_DOT_IN_LABEL */
9631 }
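
/* Illustrative example (a sketch): on a target where `.' is allowed in
 labels, formatting anon_aggrname_format () with a counter of 3 produces
 the name "._3", which anon_aggrname_p above recognizes by its "._"
 prefix. */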
9632
9633 /* Generate a name for a special-purpose function.
9634 The generated name may need to be unique across the whole link.
9635 Changes to this function may also require corresponding changes to
9636 xstrdup_mask_random.
9637 TYPE is some string to identify the purpose of this function to the
9638 linker or collect2; it must start with an uppercase letter,
9639 one of:
9640 I - for constructors
9641 D - for destructors
9642 N - for C++ anonymous namespaces
9643 F - for DWARF unwind frame information. */
9644
9645 tree
9646 get_file_function_name (const char *type)
9647 {
9648 char *buf;
9649 const char *p;
9650 char *q;
9651
9652 /* If we already have a name we know to be unique, just use that. */
9653 if (first_global_object_name)
9654 p = q = ASTRDUP (first_global_object_name);
9655 /* If the target is handling the constructors/destructors, they
9656 will be local to this file and the name is only necessary for
9657 debugging purposes.
9658 We also assign sub_I and sub_D suffixes to constructors called from
9659 the global static constructors. These are always local. */
9660 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9661 || (strncmp (type, "sub_", 4) == 0
9662 && (type[4] == 'I' || type[4] == 'D')))
9663 {
9664 const char *file = main_input_filename;
9665 if (! file)
9666 file = LOCATION_FILE (input_location);
9667 /* Just use the file's basename, because the full pathname
9668 might be quite long. */
9669 p = q = ASTRDUP (lbasename (file));
9670 }
9671 else
9672 {
9673 /* Otherwise, the name must be unique across the entire link.
9674 We don't have anything that we know to be unique to this translation
9675 unit, so use what we do have and throw in some randomness. */
9676 unsigned len;
9677 const char *name = weak_global_object_name;
9678 const char *file = main_input_filename;
9679
9680 if (! name)
9681 name = "";
9682 if (! file)
9683 file = LOCATION_FILE (input_location);
9684
9685 len = strlen (file);
9686 q = (char *) alloca (9 + 17 + len + 1);
9687 memcpy (q, file, len + 1);
9688
9689 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9690 crc32_string (0, name), get_random_seed (false));
9691
9692 p = q;
9693 }
9694
9695 clean_symbol_name (q);
9696 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9697 + strlen (type));
9698
9699 /* Set up the name of the file-level functions we may need.
9700 Use a global object (which is already required to be unique over
9701 the program) rather than the file name (which imposes extra
9702 constraints). */
9703 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9704
9705 return get_identifier (buf);
9706 }
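
/* Illustrative example (a sketch): when first_global_object_name is "main"
 and TYPE is "I", the FILE_FUNCTION_FORMAT expansion yields the identifier
 "_GLOBAL__I_main"; when no unique global name is known, the basename of
 the input file plus a CRC/random suffix is used instead. */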
9707 \f
9708 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9709
9710 /* Complain that the tree code of NODE does not match any code in the
9711 zero-terminated list of trailing codes. The list can be empty, in which
9712 case a vaguer error message is produced. FILE, LINE, and FUNCTION
9713 are of the caller. */
9714
9715 void
9716 tree_check_failed (const_tree node, const char *file,
9717 int line, const char *function, ...)
9718 {
9719 va_list args;
9720 const char *buffer;
9721 unsigned length = 0;
9722 enum tree_code code;
9723
9724 va_start (args, function);
9725 while ((code = (enum tree_code) va_arg (args, int)))
9726 length += 4 + strlen (get_tree_code_name (code));
9727 va_end (args);
9728 if (length)
9729 {
9730 char *tmp;
9731 va_start (args, function);
9732 length += strlen ("expected ");
9733 buffer = tmp = (char *) alloca (length);
9734 length = 0;
9735 while ((code = (enum tree_code) va_arg (args, int)))
9736 {
9737 const char *prefix = length ? " or " : "expected ";
9738
9739 strcpy (tmp + length, prefix);
9740 length += strlen (prefix);
9741 strcpy (tmp + length, get_tree_code_name (code));
9742 length += strlen (get_tree_code_name (code));
9743 }
9744 va_end (args);
9745 }
9746 else
9747 buffer = "unexpected node";
9748
9749 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9750 buffer, get_tree_code_name (TREE_CODE (node)),
9751 function, trim_filename (file), line);
9752 }
9753
9754 /* Complain that the tree code of NODE matches one of the codes in the
9755 zero-terminated list of forbidden trailing codes. FILE, LINE, and
9756 FUNCTION are of the caller. */
9757
9758 void
9759 tree_not_check_failed (const_tree node, const char *file,
9760 int line, const char *function, ...)
9761 {
9762 va_list args;
9763 char *buffer;
9764 unsigned length = 0;
9765 enum tree_code code;
9766
9767 va_start (args, function);
9768 while ((code = (enum tree_code) va_arg (args, int)))
9769 length += 4 + strlen (get_tree_code_name (code));
9770 va_end (args);
9771 va_start (args, function);
9772 buffer = (char *) alloca (length);
9773 length = 0;
9774 while ((code = (enum tree_code) va_arg (args, int)))
9775 {
9776 if (length)
9777 {
9778 strcpy (buffer + length, " or ");
9779 length += 4;
9780 }
9781 strcpy (buffer + length, get_tree_code_name (code));
9782 length += strlen (get_tree_code_name (code));
9783 }
9784 va_end (args);
9785
9786 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9787 buffer, get_tree_code_name (TREE_CODE (node)),
9788 function, trim_filename (file), line);
9789 }
9790
9791 /* Similar to tree_check_failed, except that we check for a class of tree
9792 code, given in CL. */
9793
9794 void
9795 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9796 const char *file, int line, const char *function)
9797 {
9798 internal_error
9799 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9800 TREE_CODE_CLASS_STRING (cl),
9801 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9802 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9803 }
9804
9805 /* Similar to tree_check_failed, except that instead of specifying a
9806 dozen codes, use the knowledge that they're all sequential. */
9807
9808 void
9809 tree_range_check_failed (const_tree node, const char *file, int line,
9810 const char *function, enum tree_code c1,
9811 enum tree_code c2)
9812 {
9813 char *buffer;
9814 unsigned length = 0;
9815 unsigned int c;
9816
9817 for (c = c1; c <= c2; ++c)
9818 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9819
9820 length += strlen ("expected ");
9821 buffer = (char *) alloca (length);
9822 length = 0;
9823
9824 for (c = c1; c <= c2; ++c)
9825 {
9826 const char *prefix = length ? " or " : "expected ";
9827
9828 strcpy (buffer + length, prefix);
9829 length += strlen (prefix);
9830 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9831 length += strlen (get_tree_code_name ((enum tree_code) c));
9832 }
9833
9834 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9835 buffer, get_tree_code_name (TREE_CODE (node)),
9836 function, trim_filename (file), line);
9837 }
9838
9839
9840 /* Similar to tree_check_failed, except that we check that a tree does
9841 not have the specified code, given in CL. */
9842
9843 void
9844 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9845 const char *file, int line, const char *function)
9846 {
9847 internal_error
9848 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9849 TREE_CODE_CLASS_STRING (cl),
9850 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9851 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9852 }
9853
9854
9855 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9856
9857 void
9858 omp_clause_check_failed (const_tree node, const char *file, int line,
9859 const char *function, enum omp_clause_code code)
9860 {
9861 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9862 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9863 function, trim_filename (file), line);
9864 }
9865
9866
9867 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9868
9869 void
9870 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9871 const char *function, enum omp_clause_code c1,
9872 enum omp_clause_code c2)
9873 {
9874 char *buffer;
9875 unsigned length = 0;
9876 unsigned int c;
9877
9878 for (c = c1; c <= c2; ++c)
9879 length += 4 + strlen (omp_clause_code_name[c]);
9880
9881 length += strlen ("expected ");
9882 buffer = (char *) alloca (length);
9883 length = 0;
9884
9885 for (c = c1; c <= c2; ++c)
9886 {
9887 const char *prefix = length ? " or " : "expected ";
9888
9889 strcpy (buffer + length, prefix);
9890 length += strlen (prefix);
9891 strcpy (buffer + length, omp_clause_code_name[c]);
9892 length += strlen (omp_clause_code_name[c]);
9893 }
9894
9895 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9896 buffer, omp_clause_code_name[TREE_CODE (node)],
9897 function, trim_filename (file), line);
9898 }
9899
9900
9901 #undef DEFTREESTRUCT
9902 #define DEFTREESTRUCT(VAL, NAME) NAME,
9903
9904 static const char *ts_enum_names[] = {
9905 #include "treestruct.def"
9906 };
9907 #undef DEFTREESTRUCT
9908
9909 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9910
9911 /* Similar to tree_class_check_failed, except that we check for
9912 whether CODE contains the tree structure identified by EN. */
9913
9914 void
9915 tree_contains_struct_check_failed (const_tree node,
9916 const enum tree_node_structure_enum en,
9917 const char *file, int line,
9918 const char *function)
9919 {
9920 internal_error
9921 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9922 TS_ENUM_NAME (en),
9923 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9924 }
9925
9926
9927 /* Similar to above, except that the check is for the bounds of a
9928 TREE_INT_CST's (dynamically sized) element vector. */
9929
9930 void
9931 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9932 const char *function)
9933 {
9934 internal_error
9935 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9936 idx + 1, len, function, trim_filename (file), line);
9937 }
9938
9939 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9940 (dynamically sized) vector. */
9941
9942 void
9943 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9944 const char *function)
9945 {
9946 internal_error
9947 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9948 idx + 1, len, function, trim_filename (file), line);
9949 }
9950
9951 /* Similar to above, except that the check is for the bounds of the operand
9952 vector of an expression node EXP. */
9953
9954 void
9955 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9956 int line, const char *function)
9957 {
9958 enum tree_code code = TREE_CODE (exp);
9959 internal_error
9960 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9961 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9962 function, trim_filename (file), line);
9963 }
9964
9965 /* Similar to above, except that the check is for the number of
9966 operands of an OMP_CLAUSE node. */
9967
9968 void
9969 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9970 int line, const char *function)
9971 {
9972 internal_error
9973 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9974 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9975 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9976 trim_filename (file), line);
9977 }
9978 #endif /* ENABLE_TREE_CHECKING */
9979 \f
9980 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9981 and mapped to the machine mode MODE. Initialize its fields and build
9982 the information necessary for debugging output. */
9983
9984 static tree
9985 make_vector_type (tree innertype, int nunits, machine_mode mode)
9986 {
9987 tree t;
9988 inchash::hash hstate;
9989 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9990
9991 t = make_node (VECTOR_TYPE);
9992 TREE_TYPE (t) = mv_innertype;
9993 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9994 SET_TYPE_MODE (t, mode);
9995
9996 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9997 SET_TYPE_STRUCTURAL_EQUALITY (t);
9998 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9999 || mode != VOIDmode)
10000 && !VECTOR_BOOLEAN_TYPE_P (t))
10001 TYPE_CANONICAL (t)
10002 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10003
10004 layout_type (t);
10005
10006 hstate.add_wide_int (VECTOR_TYPE);
10007 hstate.add_wide_int (nunits);
10008 hstate.add_wide_int (mode);
10009 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
10010 t = type_hash_canon (hstate.end (), t);
10011
10012 /* We have built a main variant, based on the main variant of the
10013 inner type. Use it to build the variant we return. */
10014 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10015 && TREE_TYPE (t) != innertype)
10016 return build_type_attribute_qual_variant (t,
10017 TYPE_ATTRIBUTES (innertype),
10018 TYPE_QUALS (innertype));
10019
10020 return t;
10021 }
10022
10023 static tree
10024 make_or_reuse_type (unsigned size, int unsignedp)
10025 {
10026 int i;
10027
10028 if (size == INT_TYPE_SIZE)
10029 return unsignedp ? unsigned_type_node : integer_type_node;
10030 if (size == CHAR_TYPE_SIZE)
10031 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10032 if (size == SHORT_TYPE_SIZE)
10033 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10034 if (size == LONG_TYPE_SIZE)
10035 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10036 if (size == LONG_LONG_TYPE_SIZE)
10037 return (unsignedp ? long_long_unsigned_type_node
10038 : long_long_integer_type_node);
10039
10040 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10041 if (size == int_n_data[i].bitsize
10042 && int_n_enabled_p[i])
10043 return (unsignedp ? int_n_trees[i].unsigned_type
10044 : int_n_trees[i].signed_type);
10045
10046 if (unsignedp)
10047 return make_unsigned_type (size);
10048 else
10049 return make_signed_type (size);
10050 }
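
/* Illustrative example (a sketch; the sizes are target-dependent): on a
 target where INT_TYPE_SIZE is 32, make_or_reuse_type (32, 1) reuses
 unsigned_type_node, while a size that matches none of the standard C
 types falls through to make_unsigned_type or make_signed_type. */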
10051
10052 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10053
10054 static tree
10055 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10056 {
10057 if (satp)
10058 {
10059 if (size == SHORT_FRACT_TYPE_SIZE)
10060 return unsignedp ? sat_unsigned_short_fract_type_node
10061 : sat_short_fract_type_node;
10062 if (size == FRACT_TYPE_SIZE)
10063 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10064 if (size == LONG_FRACT_TYPE_SIZE)
10065 return unsignedp ? sat_unsigned_long_fract_type_node
10066 : sat_long_fract_type_node;
10067 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10068 return unsignedp ? sat_unsigned_long_long_fract_type_node
10069 : sat_long_long_fract_type_node;
10070 }
10071 else
10072 {
10073 if (size == SHORT_FRACT_TYPE_SIZE)
10074 return unsignedp ? unsigned_short_fract_type_node
10075 : short_fract_type_node;
10076 if (size == FRACT_TYPE_SIZE)
10077 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10078 if (size == LONG_FRACT_TYPE_SIZE)
10079 return unsignedp ? unsigned_long_fract_type_node
10080 : long_fract_type_node;
10081 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10082 return unsignedp ? unsigned_long_long_fract_type_node
10083 : long_long_fract_type_node;
10084 }
10085
10086 return make_fract_type (size, unsignedp, satp);
10087 }
10088
10089 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10090
10091 static tree
10092 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10093 {
10094 if (satp)
10095 {
10096 if (size == SHORT_ACCUM_TYPE_SIZE)
10097 return unsignedp ? sat_unsigned_short_accum_type_node
10098 : sat_short_accum_type_node;
10099 if (size == ACCUM_TYPE_SIZE)
10100 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10101 if (size == LONG_ACCUM_TYPE_SIZE)
10102 return unsignedp ? sat_unsigned_long_accum_type_node
10103 : sat_long_accum_type_node;
10104 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10105 return unsignedp ? sat_unsigned_long_long_accum_type_node
10106 : sat_long_long_accum_type_node;
10107 }
10108 else
10109 {
10110 if (size == SHORT_ACCUM_TYPE_SIZE)
10111 return unsignedp ? unsigned_short_accum_type_node
10112 : short_accum_type_node;
10113 if (size == ACCUM_TYPE_SIZE)
10114 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10115 if (size == LONG_ACCUM_TYPE_SIZE)
10116 return unsignedp ? unsigned_long_accum_type_node
10117 : long_accum_type_node;
10118 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10119 return unsignedp ? unsigned_long_long_accum_type_node
10120 : long_long_accum_type_node;
10121 }
10122
10123 return make_accum_type (size, unsignedp, satp);
10124 }
10125
10126
10127 /* Create an atomic variant node for TYPE. This routine is called
10128 during initialization of data types to create the 5 basic atomic
10129 types. The generic build_variant_type function requires these to
10130 already be set up in order to function properly, so cannot be
10131 called from there. If ALIGN is non-zero, then ensure alignment is
10132 overridden to this value. */
10133
10134 static tree
10135 build_atomic_base (tree type, unsigned int align)
10136 {
10137 tree t;
10138
10139 /* Make sure it's not already registered. */
10140 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10141 return t;
10142
10143 t = build_variant_type_copy (type);
10144 set_type_quals (t, TYPE_QUAL_ATOMIC);
10145
10146 if (align)
10147 SET_TYPE_ALIGN (t, align);
10148
10149 return t;
10150 }
10151
10152 /* Create nodes for all integer types (and error_mark_node) using the sizes
10153 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10154
10155 void
10156 build_common_tree_nodes (bool signed_char)
10157 {
10158 int i;
10159
10160 error_mark_node = make_node (ERROR_MARK);
10161 TREE_TYPE (error_mark_node) = error_mark_node;
10162
10163 initialize_sizetypes ();
10164
10165 /* Define both `signed char' and `unsigned char'. */
10166 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10167 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10168 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10169 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10170
10171 /* Define `char', which is like either `signed char' or `unsigned char'
10172 but not the same as either. */
10173 char_type_node
10174 = (signed_char
10175 ? make_signed_type (CHAR_TYPE_SIZE)
10176 : make_unsigned_type (CHAR_TYPE_SIZE));
10177 TYPE_STRING_FLAG (char_type_node) = 1;
10178
10179 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10180 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10181 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10182 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10183 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10184 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10185 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10186 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10187
10188 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10189 {
10190 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10191 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10192 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10193 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10194
10195 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10196 && int_n_enabled_p[i])
10197 {
10198 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10199 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10200 }
10201 }
10202
10203 /* Define a boolean type. This type only represents boolean values but
10204 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10205 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10206 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10207 TYPE_PRECISION (boolean_type_node) = 1;
10208 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10209
10210 /* Define what type to use for size_t. */
10211 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10212 size_type_node = unsigned_type_node;
10213 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10214 size_type_node = long_unsigned_type_node;
10215 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10216 size_type_node = long_long_unsigned_type_node;
10217 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10218 size_type_node = short_unsigned_type_node;
10219 else
10220 {
10221 int i;
10222
10223 size_type_node = NULL_TREE;
10224 for (i = 0; i < NUM_INT_N_ENTS; i++)
10225 if (int_n_enabled_p[i])
10226 {
10227 char name[50];
10228 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10229
10230 if (strcmp (name, SIZE_TYPE) == 0)
10231 {
10232 size_type_node = int_n_trees[i].unsigned_type;
10233 }
10234 }
10235 if (size_type_node == NULL_TREE)
10236 gcc_unreachable ();
10237 }
10238
10239 /* Fill in the rest of the sized types. Reuse existing type nodes
10240 when possible. */
10241 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10242 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10243 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10244 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10245 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10246
10247 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10248 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10249 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10250 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10251 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10252
10253 /* Don't call build_qualified_type for atomics. That routine does
10254 special processing for atomics, and until they are initialized
10255 it's better not to make that call.
10256
10257 Check to see if there is a target override for atomic types. */
10258
10259 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10260 targetm.atomic_align_for_mode (QImode));
10261 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10262 targetm.atomic_align_for_mode (HImode));
10263 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10264 targetm.atomic_align_for_mode (SImode));
10265 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10266 targetm.atomic_align_for_mode (DImode));
10267 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10268 targetm.atomic_align_for_mode (TImode));
10269
10270 access_public_node = get_identifier ("public");
10271 access_protected_node = get_identifier ("protected");
10272 access_private_node = get_identifier ("private");
10273
10274 /* Define these next since types below may use them. */
10275 integer_zero_node = build_int_cst (integer_type_node, 0);
10276 integer_one_node = build_int_cst (integer_type_node, 1);
10277 integer_three_node = build_int_cst (integer_type_node, 3);
10278 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10279
10280 size_zero_node = size_int (0);
10281 size_one_node = size_int (1);
10282 bitsize_zero_node = bitsize_int (0);
10283 bitsize_one_node = bitsize_int (1);
10284 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10285
10286 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10287 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10288
10289 void_type_node = make_node (VOID_TYPE);
10290 layout_type (void_type_node);
10291
10292 pointer_bounds_type_node = targetm.chkp_bound_type ();
10293
10294 /* We are not going to have real types in C with less than byte alignment,
10295 so we might as well not have any types that claim to have it. */
10296 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10297 TYPE_USER_ALIGN (void_type_node) = 0;
10298
10299 void_node = make_node (VOID_CST);
10300 TREE_TYPE (void_node) = void_type_node;
10301
10302 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10303 layout_type (TREE_TYPE (null_pointer_node));
10304
10305 ptr_type_node = build_pointer_type (void_type_node);
10306 const_ptr_type_node
10307 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10308 fileptr_type_node = ptr_type_node;
10309
10310 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10311
10312 float_type_node = make_node (REAL_TYPE);
10313 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10314 layout_type (float_type_node);
10315
10316 double_type_node = make_node (REAL_TYPE);
10317 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10318 layout_type (double_type_node);
10319
10320 long_double_type_node = make_node (REAL_TYPE);
10321 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10322 layout_type (long_double_type_node);
10323
10324 float_ptr_type_node = build_pointer_type (float_type_node);
10325 double_ptr_type_node = build_pointer_type (double_type_node);
10326 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10327 integer_ptr_type_node = build_pointer_type (integer_type_node);
10328
10329 /* Fixed size integer types. */
10330 uint16_type_node = make_or_reuse_type (16, 1);
10331 uint32_type_node = make_or_reuse_type (32, 1);
10332 uint64_type_node = make_or_reuse_type (64, 1);
10333
10334 /* Decimal float types. */
10335 dfloat32_type_node = make_node (REAL_TYPE);
10336 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10337 layout_type (dfloat32_type_node);
10338 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10339 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10340
10341 dfloat64_type_node = make_node (REAL_TYPE);
10342 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10343 layout_type (dfloat64_type_node);
10344 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10345 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10346
10347 dfloat128_type_node = make_node (REAL_TYPE);
10348 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10349 layout_type (dfloat128_type_node);
10350 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10351 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10352
10353 complex_integer_type_node = build_complex_type (integer_type_node);
10354 complex_float_type_node = build_complex_type (float_type_node);
10355 complex_double_type_node = build_complex_type (double_type_node);
10356 complex_long_double_type_node = build_complex_type (long_double_type_node);
10357
10358 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10359 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10360 sat_ ## KIND ## _type_node = \
10361 make_sat_signed_ ## KIND ## _type (SIZE); \
10362 sat_unsigned_ ## KIND ## _type_node = \
10363 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10364 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10365 unsigned_ ## KIND ## _type_node = \
10366 make_unsigned_ ## KIND ## _type (SIZE);
10367
10368 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10369 sat_ ## WIDTH ## KIND ## _type_node = \
10370 make_sat_signed_ ## KIND ## _type (SIZE); \
10371 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10372 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10373 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10374 unsigned_ ## WIDTH ## KIND ## _type_node = \
10375 make_unsigned_ ## KIND ## _type (SIZE);
10376
10377 /* Make fixed-point type nodes based on four different widths. */
10378 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10379 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10380 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10381 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10382 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10383
10384 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10385 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10386 NAME ## _type_node = \
10387 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10388 u ## NAME ## _type_node = \
10389 make_or_reuse_unsigned_ ## KIND ## _type \
10390 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10391 sat_ ## NAME ## _type_node = \
10392 make_or_reuse_sat_signed_ ## KIND ## _type \
10393 (GET_MODE_BITSIZE (MODE ## mode)); \
10394 sat_u ## NAME ## _type_node = \
10395 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10396 (GET_MODE_BITSIZE (U ## MODE ## mode));
10397
10398 /* Fixed-point type and mode nodes. */
10399 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10400 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10401 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10402 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10403 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10404 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10405 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10406 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10407 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10408 MAKE_FIXED_MODE_NODE (accum, da, DA)
10409 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10410
10411 {
10412 tree t = targetm.build_builtin_va_list ();
10413
10414 /* Many back-ends define record types without setting TYPE_NAME.
10415 If we copied the record type here, we'd keep the original
10416 record type without a name. This breaks name mangling. So,
10417 don't copy record types and let c_common_nodes_and_builtins()
10418 declare the type to be __builtin_va_list. */
10419 if (TREE_CODE (t) != RECORD_TYPE)
10420 t = build_variant_type_copy (t);
10421
10422 va_list_type_node = t;
10423 }
10424 }
10425
10426 /* Modify DECL for given flags.
10427 TM_PURE attribute is set only on types, so the function will modify
10428 DECL's type when ECF_TM_PURE is used. */
10429
10430 void
10431 set_call_expr_flags (tree decl, int flags)
10432 {
10433 if (flags & ECF_NOTHROW)
10434 TREE_NOTHROW (decl) = 1;
10435 if (flags & ECF_CONST)
10436 TREE_READONLY (decl) = 1;
10437 if (flags & ECF_PURE)
10438 DECL_PURE_P (decl) = 1;
10439 if (flags & ECF_LOOPING_CONST_OR_PURE)
10440 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10441 if (flags & ECF_NOVOPS)
10442 DECL_IS_NOVOPS (decl) = 1;
10443 if (flags & ECF_NORETURN)
10444 TREE_THIS_VOLATILE (decl) = 1;
10445 if (flags & ECF_MALLOC)
10446 DECL_IS_MALLOC (decl) = 1;
10447 if (flags & ECF_RETURNS_TWICE)
10448 DECL_IS_RETURNS_TWICE (decl) = 1;
10449 if (flags & ECF_LEAF)
10450 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10451 NULL, DECL_ATTRIBUTES (decl));
10452 if (flags & ECF_RET1)
10453 DECL_ATTRIBUTES (decl)
10454 = tree_cons (get_identifier ("fn spec"),
10455 build_tree_list (NULL_TREE, build_string (1, "1")),
10456 DECL_ATTRIBUTES (decl));
10457 if ((flags & ECF_TM_PURE) && flag_tm)
10458 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10459 /* Looping const or pure is implied by noreturn.
10460 There is currently no way to declare looping const or looping pure alone. */
10461 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10462 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10463 }
10464
10465
10466 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10467
10468 static void
10469 local_define_builtin (const char *name, tree type, enum built_in_function code,
10470 const char *library_name, int ecf_flags)
10471 {
10472 tree decl;
10473
10474 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10475 library_name, NULL_TREE);
10476 set_call_expr_flags (decl, ecf_flags);
10477
10478 set_builtin_decl (code, decl, true);
10479 }
10480
10481 /* Call this function after instantiating all builtins that the language
10482 front end cares about. This will build the rest of the builtins
10483 and internal functions that are relied upon by the tree optimizers and
10484 the middle-end. */
10485
10486 void
10487 build_common_builtin_nodes (void)
10488 {
10489 tree tmp, ftype;
10490 int ecf_flags;
10491
10492 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10493 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10494 {
10495 ftype = build_function_type (void_type_node, void_list_node);
10496 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10497 local_define_builtin ("__builtin_unreachable", ftype,
10498 BUILT_IN_UNREACHABLE,
10499 "__builtin_unreachable",
10500 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10501 | ECF_CONST);
10502 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10503 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10504 "abort",
10505 ECF_LEAF | ECF_NORETURN | ECF_CONST);
10506 }
10507
10508 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10509 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10510 {
10511 ftype = build_function_type_list (ptr_type_node,
10512 ptr_type_node, const_ptr_type_node,
10513 size_type_node, NULL_TREE);
10514
10515 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10516 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10517 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10518 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10519 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10520 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10521 }
10522
10523 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10524 {
10525 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10526 const_ptr_type_node, size_type_node,
10527 NULL_TREE);
10528 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10529 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10530 }
10531
10532 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10533 {
10534 ftype = build_function_type_list (ptr_type_node,
10535 ptr_type_node, integer_type_node,
10536 size_type_node, NULL_TREE);
10537 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10538 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10539 }
10540
10541 /* If we're checking the stack, `alloca' can throw. */
10542 const int alloca_flags
10543 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10544
10545 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10546 {
10547 ftype = build_function_type_list (ptr_type_node,
10548 size_type_node, NULL_TREE);
10549 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10550 "alloca", alloca_flags);
10551 }
10552
10553 ftype = build_function_type_list (ptr_type_node, size_type_node,
10554 size_type_node, NULL_TREE);
10555 local_define_builtin ("__builtin_alloca_with_align", ftype,
10556 BUILT_IN_ALLOCA_WITH_ALIGN,
10557 "__builtin_alloca_with_align",
10558 alloca_flags);
10559
10560 ftype = build_function_type_list (void_type_node,
10561 ptr_type_node, ptr_type_node,
10562 ptr_type_node, NULL_TREE);
10563 local_define_builtin ("__builtin_init_trampoline", ftype,
10564 BUILT_IN_INIT_TRAMPOLINE,
10565 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10566 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10567 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10568 "__builtin_init_heap_trampoline",
10569 ECF_NOTHROW | ECF_LEAF);
10570
10571 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10572 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10573 BUILT_IN_ADJUST_TRAMPOLINE,
10574 "__builtin_adjust_trampoline",
10575 ECF_CONST | ECF_NOTHROW);
10576
10577 ftype = build_function_type_list (void_type_node,
10578 ptr_type_node, ptr_type_node, NULL_TREE);
10579 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10580 BUILT_IN_NONLOCAL_GOTO,
10581 "__builtin_nonlocal_goto",
10582 ECF_NORETURN | ECF_NOTHROW);
10583
10584 ftype = build_function_type_list (void_type_node,
10585 ptr_type_node, ptr_type_node, NULL_TREE);
10586 local_define_builtin ("__builtin_setjmp_setup", ftype,
10587 BUILT_IN_SETJMP_SETUP,
10588 "__builtin_setjmp_setup", ECF_NOTHROW);
10589
10590 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10591 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10592 BUILT_IN_SETJMP_RECEIVER,
10593 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10594
10595 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10596 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10597 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10598
10599 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10600 local_define_builtin ("__builtin_stack_restore", ftype,
10601 BUILT_IN_STACK_RESTORE,
10602 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10603
10604 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10605 const_ptr_type_node, size_type_node,
10606 NULL_TREE);
10607 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10608 "__builtin_memcmp_eq",
10609 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10610
10611 /* If there's a possibility that we might use the ARM EABI, build the
10612 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10613 if (targetm.arm_eabi_unwinder)
10614 {
10615 ftype = build_function_type_list (void_type_node, NULL_TREE);
10616 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10617 BUILT_IN_CXA_END_CLEANUP,
10618 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10619 }
10620
10621 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10622 local_define_builtin ("__builtin_unwind_resume", ftype,
10623 BUILT_IN_UNWIND_RESUME,
10624 ((targetm_common.except_unwind_info (&global_options)
10625 == UI_SJLJ)
10626 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10627 ECF_NORETURN);
10628
10629 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10630 {
10631 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10632 NULL_TREE);
10633 local_define_builtin ("__builtin_return_address", ftype,
10634 BUILT_IN_RETURN_ADDRESS,
10635 "__builtin_return_address",
10636 ECF_NOTHROW);
10637 }
10638
10639 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10640 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10641 {
10642 ftype = build_function_type_list (void_type_node, ptr_type_node,
10643 ptr_type_node, NULL_TREE);
10644 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10645 local_define_builtin ("__cyg_profile_func_enter", ftype,
10646 BUILT_IN_PROFILE_FUNC_ENTER,
10647 "__cyg_profile_func_enter", 0);
10648 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10649 local_define_builtin ("__cyg_profile_func_exit", ftype,
10650 BUILT_IN_PROFILE_FUNC_EXIT,
10651 "__cyg_profile_func_exit", 0);
10652 }
10653
10654 /* The exception object and filter values from the runtime. The argument
10655 must be zero before exception lowering, i.e. from the front end. After
10656 exception lowering, it will be the region number for the exception
10657 landing pad. These functions are PURE instead of CONST to prevent
10658 them from being hoisted past the exception edge that will initialize
10659 their values in the landing pad. */
10660 ftype = build_function_type_list (ptr_type_node,
10661 integer_type_node, NULL_TREE);
10662 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10663 /* Only use TM_PURE if we have TM language support. */
10664 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10665 ecf_flags |= ECF_TM_PURE;
10666 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10667 "__builtin_eh_pointer", ecf_flags);
10668
10669 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10670 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10671 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10672 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10673
10674 ftype = build_function_type_list (void_type_node,
10675 integer_type_node, integer_type_node,
10676 NULL_TREE);
10677 local_define_builtin ("__builtin_eh_copy_values", ftype,
10678 BUILT_IN_EH_COPY_VALUES,
10679 "__builtin_eh_copy_values", ECF_NOTHROW);
10680
10681 /* Complex multiplication and division. These are handled as builtins
10682 rather than optabs because emit_library_call_value doesn't support
10683 complex. Further, we can do slightly better with folding these
10684 beasties if the real and imaginary parts of the arguments are separate. */
10685 {
10686 int mode;
10687
10688 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10689 {
10690 char mode_name_buf[4], *q;
10691 const char *p;
10692 enum built_in_function mcode, dcode;
10693 tree type, inner_type;
10694 const char *prefix = "__";
10695
10696 if (targetm.libfunc_gnu_prefix)
10697 prefix = "__gnu_";
10698
10699 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10700 if (type == NULL)
10701 continue;
10702 inner_type = TREE_TYPE (type);
10703
10704 ftype = build_function_type_list (type, inner_type, inner_type,
10705 inner_type, inner_type, NULL_TREE);
10706
10707 mcode = ((enum built_in_function)
10708 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10709 dcode = ((enum built_in_function)
10710 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10711
10712 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10713 *q = TOLOWER (*p);
10714 *q = '\0';
10715
10716 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10717 NULL);
10718 local_define_builtin (built_in_names[mcode], ftype, mcode,
10719 built_in_names[mcode],
10720 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10721
10722 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10723 NULL);
10724 local_define_builtin (built_in_names[dcode], ftype, dcode,
10725 built_in_names[dcode],
10726 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10727 }
10728 }
10729
10730 init_internal_fns ();
10731 }
10732
10733 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10734 better way.
10735
10736 If we requested a pointer to a vector, build up the pointers that
10737 we stripped off while looking for the inner type. Similarly for
10738 return values from functions.
10739
10740 The argument TYPE is the top of the chain, and BOTTOM is the
10741 new type which we will point to. */
10742
10743 tree
10744 reconstruct_complex_type (tree type, tree bottom)
10745 {
10746 tree inner, outer;
10747
10748 if (TREE_CODE (type) == POINTER_TYPE)
10749 {
10750 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10751 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10752 TYPE_REF_CAN_ALIAS_ALL (type));
10753 }
10754 else if (TREE_CODE (type) == REFERENCE_TYPE)
10755 {
10756 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10757 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10758 TYPE_REF_CAN_ALIAS_ALL (type));
10759 }
10760 else if (TREE_CODE (type) == ARRAY_TYPE)
10761 {
10762 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10763 outer = build_array_type (inner, TYPE_DOMAIN (type));
10764 }
10765 else if (TREE_CODE (type) == FUNCTION_TYPE)
10766 {
10767 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10768 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10769 }
10770 else if (TREE_CODE (type) == METHOD_TYPE)
10771 {
10772 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10773 /* The build_method_type_directly() routine prepends 'this' to the argument
10774 list, so we must compensate by getting rid of it here. */
10775 outer
10776 = build_method_type_directly
10777 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10778 inner,
10779 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10780 }
10781 else if (TREE_CODE (type) == OFFSET_TYPE)
10782 {
10783 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10784 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10785 }
10786 else
10787 return bottom;
10788
10789 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10790 TYPE_QUALS (type));
10791 }
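
/* Illustrative example (a sketch): with TYPE being `float *' and BOTTOM a
 vector-of-float type, the result is a pointer to that vector type; the
 POINTER_TYPE wrapper stripped while locating the inner float is rebuilt
 around BOTTOM, with attributes and qualifiers preserved. */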
10792
10793 /* Return a vector type node for machine mode MODE (a vector or integer mode)
10794 and the inner type INNERTYPE. */
10795 tree
10796 build_vector_type_for_mode (tree innertype, machine_mode mode)
10797 {
10798 int nunits;
10799
10800 switch (GET_MODE_CLASS (mode))
10801 {
10802 case MODE_VECTOR_INT:
10803 case MODE_VECTOR_FLOAT:
10804 case MODE_VECTOR_FRACT:
10805 case MODE_VECTOR_UFRACT:
10806 case MODE_VECTOR_ACCUM:
10807 case MODE_VECTOR_UACCUM:
10808 nunits = GET_MODE_NUNITS (mode);
10809 break;
10810
10811 case MODE_INT:
10812 /* Check that there are no leftover bits. */
10813 gcc_assert (GET_MODE_BITSIZE (mode)
10814 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10815
10816 nunits = GET_MODE_BITSIZE (mode)
10817 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10818 break;
10819
10820 default:
10821 gcc_unreachable ();
10822 }
10823
10824 return make_vector_type (innertype, nunits, mode);
10825 }
10826
10827 /* Similarly, but takes the inner type and number of units, which must be
10828 a power of two. */
10829
10830 tree
10831 build_vector_type (tree innertype, int nunits)
10832 {
10833 return make_vector_type (innertype, nunits, VOIDmode);
10834 }
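
/* Illustrative example (a sketch): build_vector_type (float_type_node, 4)
 requests a four-element float vector with VOIDmode, letting layout_type
 choose a suitable machine mode (e.g. V4SFmode on targets that provide
 one; otherwise a fallback such as BLKmode). */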
10835
10836 /* Build a truth vector type with NUNITS boolean elements for a vector of VECTOR_SIZE bytes. */
10837
10838 tree
10839 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10840 {
10841 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10842 vector_size);
10843
10844 gcc_assert (mask_mode != VOIDmode);
10845
10846 unsigned HOST_WIDE_INT vsize;
10847 if (mask_mode == BLKmode)
10848 vsize = vector_size * BITS_PER_UNIT;
10849 else
10850 vsize = GET_MODE_BITSIZE (mask_mode);
10851
10852 unsigned HOST_WIDE_INT esize = vsize / nunits;
10853 gcc_assert (esize * nunits == vsize);
10854
10855 tree bool_type = build_nonstandard_boolean_type (esize);
10856
10857 return make_vector_type (bool_type, nunits, mask_mode);
10858 }
10859
10860 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10861
10862 tree
10863 build_same_sized_truth_vector_type (tree vectype)
10864 {
10865 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10866 return vectype;
10867
10868 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10869
10870 if (!size)
10871 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10872
10873 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10874 }
10875
10876 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10877
10878 tree
10879 build_opaque_vector_type (tree innertype, int nunits)
10880 {
10881 tree t = make_vector_type (innertype, nunits, VOIDmode);
10882 tree cand;
10883 /* We always build the non-opaque variant before the opaque one,
10884 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10885 cand = TYPE_NEXT_VARIANT (t);
10886 if (cand
10887 && TYPE_VECTOR_OPAQUE (cand)
10888 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10889 return cand;
10890 /* Otherwise build a variant type and make sure to queue it after
10891 the non-opaque type. */
10892 cand = build_distinct_type_copy (t);
10893 TYPE_VECTOR_OPAQUE (cand) = true;
10894 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10895 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10896 TYPE_NEXT_VARIANT (t) = cand;
10897 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10898 return cand;
10899 }
10900
10901
10902 /* Given an initializer INIT, return TRUE if INIT is zero or some
10903 aggregate of zeros. Otherwise return FALSE. */
10904 bool
10905 initializer_zerop (const_tree init)
10906 {
10907 tree elt;
10908
10909 STRIP_NOPS (init);
10910
10911 switch (TREE_CODE (init))
10912 {
10913 case INTEGER_CST:
10914 return integer_zerop (init);
10915
10916 case REAL_CST:
10917 /* ??? Note that this is not correct for C4X float formats. There,
10918 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10919 negative exponent. */
10920 return real_zerop (init)
10921 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10922
10923 case FIXED_CST:
10924 return fixed_zerop (init);
10925
10926 case COMPLEX_CST:
10927 return integer_zerop (init)
10928 || (real_zerop (init)
10929 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10930 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10931
10932 case VECTOR_CST:
10933 {
10934 unsigned i;
10935 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10936 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10937 return false;
10938 return true;
10939 }
10940
10941 case CONSTRUCTOR:
10942 {
10943 unsigned HOST_WIDE_INT idx;
10944
10945 if (TREE_CLOBBER_P (init))
10946 return false;
10947 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10948 if (!initializer_zerop (elt))
10949 return false;
10950 return true;
10951 }
10952
10953 case STRING_CST:
10954 {
10955 int i;
10956
10957 /* We need to loop through all elements to handle cases like
10958 "\0" and "\0foobar". */
10959 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10960 if (TREE_STRING_POINTER (init)[i] != '\0')
10961 return false;
10962
10963 return true;
10964 }
10965
10966 default:
10967 return false;
10968 }
10969 }
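
/* Illustrative example (a sketch): a CONSTRUCTOR for `int x[3] = {0}',
 the string constant "\0\0\0" and the real constant +0.0 are all zero
 initializers, while -0.0 and a CONSTRUCTOR marked as a clobber are
 not. */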
10970
10971 /* Check whether vector VEC consists entirely of equal elements and
10972 that the number of elements matches the type of VEC.
10973 Return the first element of the vector,
10974 or NULL_TREE if the vector is not uniform. */
10975 tree
10976 uniform_vector_p (const_tree vec)
10977 {
10978 tree first, t;
10979 unsigned i;
10980
10981 if (vec == NULL_TREE)
10982 return NULL_TREE;
10983
10984 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10985
10986 if (TREE_CODE (vec) == VECTOR_CST)
10987 {
10988 first = VECTOR_CST_ELT (vec, 0);
10989 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10990 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10991 return NULL_TREE;
10992
10993 return first;
10994 }
10995
10996 else if (TREE_CODE (vec) == CONSTRUCTOR)
10997 {
10998 first = error_mark_node;
10999
11000 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11001 {
11002 if (i == 0)
11003 {
11004 first = t;
11005 continue;
11006 }
11007 if (!operand_equal_p (first, t, 0))
11008 return NULL_TREE;
11009 }
11010 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
11011 return NULL_TREE;
11012
11013 return first;
11014 }
11015
11016 return NULL_TREE;
11017 }
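
/* Illustrative example (a sketch): for the VECTOR_CST { 7, 7, 7, 7 }
 uniform_vector_p returns the element 7, while { 1, 2, 3, 4 } and a
 CONSTRUCTOR that fills only part of the vector both yield NULL_TREE. */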
11018
11019 /* Build an empty statement at location LOC. */
11020
11021 tree
11022 build_empty_stmt (location_t loc)
11023 {
11024 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11025 SET_EXPR_LOCATION (t, loc);
11026 return t;
11027 }
11028
11029
11030 /* Build an OpenMP clause with code CODE. LOC is the location of the
11031 clause. */
11032
11033 tree
11034 build_omp_clause (location_t loc, enum omp_clause_code code)
11035 {
11036 tree t;
11037 int size, length;
11038
11039 length = omp_clause_num_ops[code];
11040 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11041
11042 record_node_allocation_statistics (OMP_CLAUSE, size);
11043
11044 t = (tree) ggc_internal_alloc (size);
11045 memset (t, 0, size);
11046 TREE_SET_CODE (t, OMP_CLAUSE);
11047 OMP_CLAUSE_SET_CODE (t, code);
11048 OMP_CLAUSE_LOCATION (t) = loc;
11049
11050 return t;
11051 }
11052
11053 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11054 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11055 Except for the CODE and operand count field, other storage for the
11056 object is initialized to zeros. */
11057
11058 tree
11059 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
11060 {
11061 tree t;
11062 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11063
11064 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11065 gcc_assert (len >= 1);
11066
11067 record_node_allocation_statistics (code, length);
11068
11069 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11070
11071 TREE_SET_CODE (t, code);
11072
11073 /* Can't use TREE_OPERAND to store the length because if checking is
11074 enabled, it will try to check the length before we store it. :-P */
11075 t->exp.operands[0] = build_int_cst (sizetype, len);
11076
11077 return t;
11078 }
11079
11080 /* Helper function for build_call_* functions; build a CALL_EXPR with
11081 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11082 the argument slots. */
11083
11084 static tree
11085 build_call_1 (tree return_type, tree fn, int nargs)
11086 {
11087 tree t;
11088
11089 t = build_vl_exp (CALL_EXPR, nargs + 3);
11090 TREE_TYPE (t) = return_type;
11091 CALL_EXPR_FN (t) = fn;
11092 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11093
11094 return t;
11095 }
11096
11097 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11098 FN and a null static chain slot. NARGS is the number of call arguments
11099 which are specified as "..." arguments. */
11100
11101 tree
11102 build_call_nary (tree return_type, tree fn, int nargs, ...)
11103 {
11104 tree ret;
11105 va_list args;
11106 va_start (args, nargs);
11107 ret = build_call_valist (return_type, fn, nargs, args);
11108 va_end (args);
11109 return ret;
11110 }
11111
11112 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11113 FN and a null static chain slot. NARGS is the number of call arguments
11114 which are specified as a va_list ARGS. */
11115
11116 tree
11117 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11118 {
11119 tree t;
11120 int i;
11121
11122 t = build_call_1 (return_type, fn, nargs);
11123 for (i = 0; i < nargs; i++)
11124 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11125 process_call_operands (t);
11126 return t;
11127 }
11128
11129 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11130 FN and a null static chain slot. NARGS is the number of call arguments
11131 which are specified as a tree array ARGS. */
11132
11133 tree
11134 build_call_array_loc (location_t loc, tree return_type, tree fn,
11135 int nargs, const tree *args)
11136 {
11137 tree t;
11138 int i;
11139
11140 t = build_call_1 (return_type, fn, nargs);
11141 for (i = 0; i < nargs; i++)
11142 CALL_EXPR_ARG (t, i) = args[i];
11143 process_call_operands (t);
11144 SET_EXPR_LOCATION (t, loc);
11145 return t;
11146 }
11147
11148 /* Like build_call_array, but takes a vec. */
11149
11150 tree
11151 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11152 {
11153 tree ret, t;
11154 unsigned int ix;
11155
11156 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11157 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11158 CALL_EXPR_ARG (ret, ix) = t;
11159 process_call_operands (ret);
11160 return ret;
11161 }
11162
11163 /* Conveniently construct a function call expression. FNDECL names the
11164 function to be called and N arguments are passed in the array
11165 ARGARRAY. */
11166
11167 tree
11168 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11169 {
11170 tree fntype = TREE_TYPE (fndecl);
11171 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11172
11173 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11174 }
11175
11176 /* Conveniently construct a function call expression. FNDECL names the
11177 function to be called and the arguments are passed in the vector
11178 VEC. */
11179
11180 tree
11181 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11182 {
11183 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11184 vec_safe_address (vec));
11185 }
11186
11187
11188 /* Conveniently construct a function call expression. FNDECL names the
11189 function to be called, N is the number of arguments, and the "..."
11190 parameters are the argument expressions. */
11191
11192 tree
11193 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11194 {
11195 va_list ap;
11196 tree *argarray = XALLOCAVEC (tree, n);
11197 int i;
11198
11199 va_start (ap, n);
11200 for (i = 0; i < n; i++)
11201 argarray[i] = va_arg (ap, tree);
11202 va_end (ap);
11203 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11204 }
11205
11206 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11207 varargs macros aren't supported by all bootstrap compilers. */
11208
11209 tree
11210 build_call_expr (tree fndecl, int n, ...)
11211 {
11212 va_list ap;
11213 tree *argarray = XALLOCAVEC (tree, n);
11214 int i;
11215
11216 va_start (ap, n);
11217 for (i = 0; i < n; i++)
11218 argarray[i] = va_arg (ap, tree);
11219 va_end (ap);
11220 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11221 }
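
/* Usage sketch (illustrative only, not used by GCC itself): build a call
   to the memcpy builtin with three argument trees DST, SRC and NBYTES
   that are assumed to already exist, e.g. in a front end or during
   gimplification:

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr_loc (loc, fn, 3, dst, src, nbytes);

   The resulting CALL_EXPR carries location LOC and has been through
   fold_build_call_array_loc via build_call_expr_loc_array.  */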
11222
11223 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11224 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11225 It will get gimplified later into an ordinary internal function. */
11226
11227 tree
11228 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11229 tree type, int n, const tree *args)
11230 {
11231 tree t = build_call_1 (type, NULL_TREE, n);
11232 for (int i = 0; i < n; ++i)
11233 CALL_EXPR_ARG (t, i) = args[i];
11234 SET_EXPR_LOCATION (t, loc);
11235 CALL_EXPR_IFN (t) = ifn;
11236 return t;
11237 }
11238
11239 /* Build an internal call expression. This is just like CALL_EXPR, except
11240 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
11241 internal function. */
11242
11243 tree
11244 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11245 tree type, int n, ...)
11246 {
11247 va_list ap;
11248 tree *argarray = XALLOCAVEC (tree, n);
11249 int i;
11250
11251 va_start (ap, n);
11252 for (i = 0; i < n; i++)
11253 argarray[i] = va_arg (ap, tree);
11254 va_end (ap);
11255 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11256 }
11257
11258 /* Return a function call to FN, if the target is guaranteed to support it,
11259 or null otherwise.
11260
11261 N is the number of arguments, passed in the "...", and TYPE is the
11262 type of the return value. */
11263
11264 tree
11265 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11266 int n, ...)
11267 {
11268 va_list ap;
11269 tree *argarray = XALLOCAVEC (tree, n);
11270 int i;
11271
11272 va_start (ap, n);
11273 for (i = 0; i < n; i++)
11274 argarray[i] = va_arg (ap, tree);
11275 va_end (ap);
11276 if (internal_fn_p (fn))
11277 {
11278 internal_fn ifn = as_internal_fn (fn);
11279 if (direct_internal_fn_p (ifn))
11280 {
11281 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11282 if (!direct_internal_fn_supported_p (ifn, types,
11283 OPTIMIZE_FOR_BOTH))
11284 return NULL_TREE;
11285 }
11286 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11287 }
11288 else
11289 {
11290 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11291 if (!fndecl)
11292 return NULL_TREE;
11293 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11294 }
11295 }
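
/* Usage sketch (illustrative only): ask for a square root call only if
   the target can expand it directly.  ARG is assumed to be an operand
   tree and TYPE its floating-point type; CFN_SQRT is the combined_fn
   corresponding to the internal function IFN_SQRT.

     tree call = maybe_build_call_expr_loc (loc, CFN_SQRT, type, 1, arg);
     if (!call)
       ... fall back; the target has no direct way to expand it ...

   For built-in combined_fn values the implicit builtin declaration is
   used instead, and NULL_TREE is returned if it is not available.  */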
11296
11297 /* Create a new constant string literal and return a char* pointer to it.
11298 The STRING_CST value is the LEN characters at STR. */
11299 tree
11300 build_string_literal (int len, const char *str)
11301 {
11302 tree t, elem, index, type;
11303
11304 t = build_string (len, str);
11305 elem = build_type_variant (char_type_node, 1, 0);
11306 index = build_index_type (size_int (len - 1));
11307 type = build_array_type (elem, index);
11308 TREE_TYPE (t) = type;
11309 TREE_CONSTANT (t) = 1;
11310 TREE_READONLY (t) = 1;
11311 TREE_STATIC (t) = 1;
11312
11313 type = build_pointer_type (elem);
11314 t = build1 (ADDR_EXPR, type,
11315 build4 (ARRAY_REF, elem,
11316 t, integer_zero_node, NULL_TREE, NULL_TREE));
11317 return t;
11318 }
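
/* Usage sketch (illustrative only): build a pointer to the constant
   string "%d\n", e.g. for use as the format argument of a printf call.
   LEN counts the terminating NUL, so strlen + 1 is passed:

     const char *fmt = "%d\n";
     tree fmt_ptr = build_string_literal (strlen (fmt) + 1, fmt);  */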
11319
11320
11321
11322 /* Return true if T (assumed to be a DECL) must be assigned a memory
11323 location. */
11324
11325 bool
11326 needs_to_live_in_memory (const_tree t)
11327 {
11328 return (TREE_ADDRESSABLE (t)
11329 || is_global_var (t)
11330 || (TREE_CODE (t) == RESULT_DECL
11331 && !DECL_BY_REFERENCE (t)
11332 && aggregate_value_p (t, current_function_decl)));
11333 }
11334
11335 /* Return the value of the constant X, sign-extended from its precision. */
11336
11337 HOST_WIDE_INT
11338 int_cst_value (const_tree x)
11339 {
11340 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11341 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11342
11343 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11344 gcc_assert (cst_and_fits_in_hwi (x));
11345
11346 if (bits < HOST_BITS_PER_WIDE_INT)
11347 {
11348 bool negative = ((val >> (bits - 1)) & 1) != 0;
11349 if (negative)
11350 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11351 else
11352 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11353 }
11354
11355 return val;
11356 }
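
/* Worked example (illustrative only): the result is sign-extended from
   TYPE_PRECISION regardless of the signedness of the constant's type, so

     int_cst_value (build_int_cst (unsigned_char_type_node, 0xff))

   yields (HOST_WIDE_INT) -1 rather than 255, while the value 0x7f yields
   127 because its top bit within the 8-bit precision is clear.  */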
11357
11358 /* If TYPE is an integral or pointer type, return an integer type with
11359 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11360 if TYPE is already an integer type of signedness UNSIGNEDP. */
11361
11362 tree
11363 signed_or_unsigned_type_for (int unsignedp, tree type)
11364 {
11365 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11366 return type;
11367
11368 if (TREE_CODE (type) == VECTOR_TYPE)
11369 {
11370 tree inner = TREE_TYPE (type);
11371 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11372 if (!inner2)
11373 return NULL_TREE;
11374 if (inner == inner2)
11375 return type;
11376 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11377 }
11378
11379 if (!INTEGRAL_TYPE_P (type)
11380 && !POINTER_TYPE_P (type)
11381 && TREE_CODE (type) != OFFSET_TYPE)
11382 return NULL_TREE;
11383
11384 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11385 }
11386
11387 /* If TYPE is an integral or pointer type, return an integer type with
11388 the same precision which is unsigned, or itself if TYPE is already an
11389 unsigned integer type. */
11390
11391 tree
11392 unsigned_type_for (tree type)
11393 {
11394 return signed_or_unsigned_type_for (1, type);
11395 }
11396
11397 /* If TYPE is an integral or pointer type, return an integer type with
11398 the same precision which is signed, or itself if TYPE is already a
11399 signed integer type. */
11400
11401 tree
11402 signed_type_for (tree type)
11403 {
11404 return signed_or_unsigned_type_for (0, type);
11405 }
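
/* Usage sketch (illustrative only): obtain an unsigned integer type of
   pointer width, e.g. when rewriting pointer arithmetic as unsigned
   arithmetic:

     tree uns = unsigned_type_for (ptr_type_node);

   For vector types the element type is converted and a matching vector
   type is rebuilt; for types that are neither integral, pointer, offset
   nor vector, NULL_TREE is returned.  */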
11406
11407 /* If TYPE is a vector type, return the corresponding boolean vector type
11408 with the same number of subparts. Otherwise return boolean_type_node. */
11409
11410 tree
11411 truth_type_for (tree type)
11412 {
11413 if (TREE_CODE (type) == VECTOR_TYPE)
11414 {
11415 if (VECTOR_BOOLEAN_TYPE_P (type))
11416 return type;
11417 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11418 GET_MODE_SIZE (TYPE_MODE (type)));
11419 }
11420 else
11421 return boolean_type_node;
11422 }
11423
11424 /* Returns the largest value obtainable by casting something in INNER type to
11425 OUTER type. */
11426
11427 tree
11428 upper_bound_in_type (tree outer, tree inner)
11429 {
11430 unsigned int det = 0;
11431 unsigned oprec = TYPE_PRECISION (outer);
11432 unsigned iprec = TYPE_PRECISION (inner);
11433 unsigned prec;
11434
11435 /* Compute a unique number for every combination. */
11436 det |= (oprec > iprec) ? 4 : 0;
11437 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11438 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11439
11440 /* Determine the exponent to use. */
11441 switch (det)
11442 {
11443 case 0:
11444 case 1:
11445 /* oprec <= iprec, outer: signed, inner: don't care. */
11446 prec = oprec - 1;
11447 break;
11448 case 2:
11449 case 3:
11450 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11451 prec = oprec;
11452 break;
11453 case 4:
11454 /* oprec > iprec, outer: signed, inner: signed. */
11455 prec = iprec - 1;
11456 break;
11457 case 5:
11458 /* oprec > iprec, outer: signed, inner: unsigned. */
11459 prec = iprec;
11460 break;
11461 case 6:
11462 /* oprec > iprec, outer: unsigned, inner: signed. */
11463 prec = oprec;
11464 break;
11465 case 7:
11466 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11467 prec = iprec;
11468 break;
11469 default:
11470 gcc_unreachable ();
11471 }
11472
11473 return wide_int_to_tree (outer,
11474 wi::mask (prec, false, TYPE_PRECISION (outer)));
11475 }
11476
11477 /* Returns the smallest value obtainable by casting something in INNER type to
11478 OUTER type. */
11479
11480 tree
11481 lower_bound_in_type (tree outer, tree inner)
11482 {
11483 unsigned oprec = TYPE_PRECISION (outer);
11484 unsigned iprec = TYPE_PRECISION (inner);
11485
11486 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11487 and obtain 0. */
11488 if (TYPE_UNSIGNED (outer)
11489 /* If we are widening something of an unsigned type, OUTER type
11490 contains all values of INNER type. In particular, both INNER
11491 and OUTER types have zero in common. */
11492 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11493 return build_int_cst (outer, 0);
11494 else
11495 {
11496 /* If we are widening a signed type to another signed type, we
11497 want to obtain -2^(iprec-1). If we are keeping the
11498 precision or narrowing to a signed type, we want to obtain
11499 -2^(oprec-1). */
11500 unsigned prec = oprec > iprec ? iprec : oprec;
11501 return wide_int_to_tree (outer,
11502 wi::mask (prec - 1, true,
11503 TYPE_PRECISION (outer)));
11504 }
11505 }
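
/* Worked example (illustrative only), using a 16-bit short and a 32-bit
   int: casting a signed int (INNER) to an unsigned short (OUTER) gives
   the bounds 0 and 0xffff, because det == 2 selects prec == oprec;
   casting a signed int to a signed short gives -32768 and 32767, because
   det == 0 selects prec == oprec - 1 and the lower bound is
   -2^(oprec-1).  */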
11506
11507 /* Return nonzero if two operands that are suitable for PHI nodes are
11508 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11509 SSA_NAME or invariant. Note that this is strictly an optimization.
11510 That is, callers of this function can directly call operand_equal_p
11511 and get the same result, only slower. */
11512
11513 int
11514 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11515 {
11516 if (arg0 == arg1)
11517 return 1;
11518 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11519 return 0;
11520 return operand_equal_p (arg0, arg1, 0);
11521 }
11522
11523 /* Returns the number of zeros at the end of the binary representation of X. */
11524
11525 tree
11526 num_ending_zeros (const_tree x)
11527 {
11528 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11529 }
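
/* For example (illustrative only), for an INTEGER_CST with value 40
   (binary 101000) this returns the constant 3 in the same type, and for
   an odd constant it returns 0.  */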
11530
11531
11532 #define WALK_SUBTREE(NODE) \
11533 do \
11534 { \
11535 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11536 if (result) \
11537 return result; \
11538 } \
11539 while (0)
11540
11541 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11542 be walked whenever a type is seen in the tree. The rest of the operands and
11543 the return value are as for walk_tree. */
11544
11545 static tree
11546 walk_type_fields (tree type, walk_tree_fn func, void *data,
11547 hash_set<tree> *pset, walk_tree_lh lh)
11548 {
11549 tree result = NULL_TREE;
11550
11551 switch (TREE_CODE (type))
11552 {
11553 case POINTER_TYPE:
11554 case REFERENCE_TYPE:
11555 case VECTOR_TYPE:
11556 /* We have to worry about mutually recursive pointers. These can't
11557 be written in C. They can in Ada. It's pathological, but
11558 there's an ACATS test (c38102a) that checks it. Deal with this
11559 by checking if we're pointing to another pointer, that one
11560 points to another pointer, that one does too, and we have no htab.
11561 If so, get a hash table. We check three levels deep to avoid
11562 the cost of the hash table if we don't need one. */
11563 if (POINTER_TYPE_P (TREE_TYPE (type))
11564 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11565 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11566 && !pset)
11567 {
11568 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11569 func, data);
11570 if (result)
11571 return result;
11572
11573 break;
11574 }
11575
11576 /* ... fall through ... */
11577
11578 case COMPLEX_TYPE:
11579 WALK_SUBTREE (TREE_TYPE (type));
11580 break;
11581
11582 case METHOD_TYPE:
11583 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11584
11585 /* Fall through. */
11586
11587 case FUNCTION_TYPE:
11588 WALK_SUBTREE (TREE_TYPE (type));
11589 {
11590 tree arg;
11591
11592 /* We never want to walk into default arguments. */
11593 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11594 WALK_SUBTREE (TREE_VALUE (arg));
11595 }
11596 break;
11597
11598 case ARRAY_TYPE:
11599 /* Don't follow this node's type if it is a pointer, for fear that
11600 we'll have infinite recursion. If we have a PSET, then we
11601 need not fear. */
11602 if (pset
11603 || (!POINTER_TYPE_P (TREE_TYPE (type))
11604 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11605 WALK_SUBTREE (TREE_TYPE (type));
11606 WALK_SUBTREE (TYPE_DOMAIN (type));
11607 break;
11608
11609 case OFFSET_TYPE:
11610 WALK_SUBTREE (TREE_TYPE (type));
11611 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11612 break;
11613
11614 default:
11615 break;
11616 }
11617
11618 return NULL_TREE;
11619 }
11620
11621 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11622 called with the DATA and the address of each sub-tree. If FUNC returns a
11623 non-NULL value, the traversal is stopped, and the value returned by FUNC
11624 is returned. If PSET is non-NULL it is used to record the nodes visited,
11625 and to avoid visiting a node more than once. */
11626
11627 tree
11628 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11629 hash_set<tree> *pset, walk_tree_lh lh)
11630 {
11631 enum tree_code code;
11632 int walk_subtrees;
11633 tree result;
11634
11635 #define WALK_SUBTREE_TAIL(NODE) \
11636 do \
11637 { \
11638 tp = & (NODE); \
11639 goto tail_recurse; \
11640 } \
11641 while (0)
11642
11643 tail_recurse:
11644 /* Skip empty subtrees. */
11645 if (!*tp)
11646 return NULL_TREE;
11647
11648 /* Don't walk the same tree twice, if the user has requested
11649 that we avoid doing so. */
11650 if (pset && pset->add (*tp))
11651 return NULL_TREE;
11652
11653 /* Call the function. */
11654 walk_subtrees = 1;
11655 result = (*func) (tp, &walk_subtrees, data);
11656
11657 /* If we found something, return it. */
11658 if (result)
11659 return result;
11660
11661 code = TREE_CODE (*tp);
11662
11663 /* Even if we didn't, FUNC may have decided that there was nothing
11664 interesting below this point in the tree. */
11665 if (!walk_subtrees)
11666 {
11667 /* But we still need to check our siblings. */
11668 if (code == TREE_LIST)
11669 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11670 else if (code == OMP_CLAUSE)
11671 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11672 else
11673 return NULL_TREE;
11674 }
11675
11676 if (lh)
11677 {
11678 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11679 if (result || !walk_subtrees)
11680 return result;
11681 }
11682
11683 switch (code)
11684 {
11685 case ERROR_MARK:
11686 case IDENTIFIER_NODE:
11687 case INTEGER_CST:
11688 case REAL_CST:
11689 case FIXED_CST:
11690 case VECTOR_CST:
11691 case STRING_CST:
11692 case BLOCK:
11693 case PLACEHOLDER_EXPR:
11694 case SSA_NAME:
11695 case FIELD_DECL:
11696 case RESULT_DECL:
11697 /* None of these have subtrees other than those already walked
11698 above. */
11699 break;
11700
11701 case TREE_LIST:
11702 WALK_SUBTREE (TREE_VALUE (*tp));
11703 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11704 break;
11705
11706 case TREE_VEC:
11707 {
11708 int len = TREE_VEC_LENGTH (*tp);
11709
11710 if (len == 0)
11711 break;
11712
11713 /* Walk all elements but the first. */
11714 while (--len)
11715 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11716
11717 /* Now walk the first one as a tail call. */
11718 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11719 }
11720
11721 case COMPLEX_CST:
11722 WALK_SUBTREE (TREE_REALPART (*tp));
11723 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11724
11725 case CONSTRUCTOR:
11726 {
11727 unsigned HOST_WIDE_INT idx;
11728 constructor_elt *ce;
11729
11730 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11731 idx++)
11732 WALK_SUBTREE (ce->value);
11733 }
11734 break;
11735
11736 case SAVE_EXPR:
11737 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11738
11739 case BIND_EXPR:
11740 {
11741 tree decl;
11742 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11743 {
11744 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11745 into declarations that are just mentioned, rather than
11746 declared; they don't really belong to this part of the tree.
11747 And, we can see cycles: the initializer for a declaration
11748 can refer to the declaration itself. */
11749 WALK_SUBTREE (DECL_INITIAL (decl));
11750 WALK_SUBTREE (DECL_SIZE (decl));
11751 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11752 }
11753 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11754 }
11755
11756 case STATEMENT_LIST:
11757 {
11758 tree_stmt_iterator i;
11759 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11760 WALK_SUBTREE (*tsi_stmt_ptr (i));
11761 }
11762 break;
11763
11764 case OMP_CLAUSE:
11765 switch (OMP_CLAUSE_CODE (*tp))
11766 {
11767 case OMP_CLAUSE_GANG:
11768 case OMP_CLAUSE__GRIDDIM_:
11769 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11770 /* FALLTHRU */
11771
11772 case OMP_CLAUSE_ASYNC:
11773 case OMP_CLAUSE_WAIT:
11774 case OMP_CLAUSE_WORKER:
11775 case OMP_CLAUSE_VECTOR:
11776 case OMP_CLAUSE_NUM_GANGS:
11777 case OMP_CLAUSE_NUM_WORKERS:
11778 case OMP_CLAUSE_VECTOR_LENGTH:
11779 case OMP_CLAUSE_PRIVATE:
11780 case OMP_CLAUSE_SHARED:
11781 case OMP_CLAUSE_FIRSTPRIVATE:
11782 case OMP_CLAUSE_COPYIN:
11783 case OMP_CLAUSE_COPYPRIVATE:
11784 case OMP_CLAUSE_FINAL:
11785 case OMP_CLAUSE_IF:
11786 case OMP_CLAUSE_NUM_THREADS:
11787 case OMP_CLAUSE_SCHEDULE:
11788 case OMP_CLAUSE_UNIFORM:
11789 case OMP_CLAUSE_DEPEND:
11790 case OMP_CLAUSE_NUM_TEAMS:
11791 case OMP_CLAUSE_THREAD_LIMIT:
11792 case OMP_CLAUSE_DEVICE:
11793 case OMP_CLAUSE_DIST_SCHEDULE:
11794 case OMP_CLAUSE_SAFELEN:
11795 case OMP_CLAUSE_SIMDLEN:
11796 case OMP_CLAUSE_ORDERED:
11797 case OMP_CLAUSE_PRIORITY:
11798 case OMP_CLAUSE_GRAINSIZE:
11799 case OMP_CLAUSE_NUM_TASKS:
11800 case OMP_CLAUSE_HINT:
11801 case OMP_CLAUSE_TO_DECLARE:
11802 case OMP_CLAUSE_LINK:
11803 case OMP_CLAUSE_USE_DEVICE_PTR:
11804 case OMP_CLAUSE_IS_DEVICE_PTR:
11805 case OMP_CLAUSE__LOOPTEMP_:
11806 case OMP_CLAUSE__SIMDUID_:
11807 case OMP_CLAUSE__CILK_FOR_COUNT_:
11808 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11809 /* FALLTHRU */
11810
11811 case OMP_CLAUSE_INDEPENDENT:
11812 case OMP_CLAUSE_NOWAIT:
11813 case OMP_CLAUSE_DEFAULT:
11814 case OMP_CLAUSE_UNTIED:
11815 case OMP_CLAUSE_MERGEABLE:
11816 case OMP_CLAUSE_PROC_BIND:
11817 case OMP_CLAUSE_INBRANCH:
11818 case OMP_CLAUSE_NOTINBRANCH:
11819 case OMP_CLAUSE_FOR:
11820 case OMP_CLAUSE_PARALLEL:
11821 case OMP_CLAUSE_SECTIONS:
11822 case OMP_CLAUSE_TASKGROUP:
11823 case OMP_CLAUSE_NOGROUP:
11824 case OMP_CLAUSE_THREADS:
11825 case OMP_CLAUSE_SIMD:
11826 case OMP_CLAUSE_DEFAULTMAP:
11827 case OMP_CLAUSE_AUTO:
11828 case OMP_CLAUSE_SEQ:
11829 case OMP_CLAUSE_TILE:
11830 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11831
11832 case OMP_CLAUSE_LASTPRIVATE:
11833 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11834 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11835 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11836
11837 case OMP_CLAUSE_COLLAPSE:
11838 {
11839 int i;
11840 for (i = 0; i < 3; i++)
11841 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11842 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11843 }
11844
11845 case OMP_CLAUSE_LINEAR:
11846 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11847 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11848 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11849 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11850
11851 case OMP_CLAUSE_ALIGNED:
11852 case OMP_CLAUSE_FROM:
11853 case OMP_CLAUSE_TO:
11854 case OMP_CLAUSE_MAP:
11855 case OMP_CLAUSE__CACHE_:
11856 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11857 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11858 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11859
11860 case OMP_CLAUSE_REDUCTION:
11861 {
11862 int i;
11863 for (i = 0; i < 5; i++)
11864 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11865 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11866 }
11867
11868 default:
11869 gcc_unreachable ();
11870 }
11871 break;
11872
11873 case TARGET_EXPR:
11874 {
11875 int i, len;
11876
11877 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11878 But, we only want to walk once. */
11879 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11880 for (i = 0; i < len; ++i)
11881 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11882 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11883 }
11884
11885 case DECL_EXPR:
11886 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11887 defining. We only want to walk into these fields of a type in this
11888 case and not in the general case of a mere reference to the type.
11889
11890 The criterion is as follows: if the field can be an expression, it
11891 must be walked only here. This should be in keeping with the fields
11892 that are directly gimplified in gimplify_type_sizes in order for the
11893 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11894 variable-sized types.
11895
11896 Note that DECLs get walked as part of processing the BIND_EXPR. */
11897 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11898 {
11899 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11900 if (TREE_CODE (*type_p) == ERROR_MARK)
11901 return NULL_TREE;
11902
11903 /* Call the function for the type. See if it returns anything or
11904 doesn't want us to continue. If we are to continue, walk both
11905 the normal fields and those for the declaration case. */
11906 result = (*func) (type_p, &walk_subtrees, data);
11907 if (result || !walk_subtrees)
11908 return result;
11909
11910 /* But do not walk a pointed-to type since it may itself need to
11911 be walked in the declaration case if it isn't anonymous. */
11912 if (!POINTER_TYPE_P (*type_p))
11913 {
11914 result = walk_type_fields (*type_p, func, data, pset, lh);
11915 if (result)
11916 return result;
11917 }
11918
11919 /* If this is a record type, also walk the fields. */
11920 if (RECORD_OR_UNION_TYPE_P (*type_p))
11921 {
11922 tree field;
11923
11924 for (field = TYPE_FIELDS (*type_p); field;
11925 field = DECL_CHAIN (field))
11926 {
11927 /* We'd like to look at the type of the field, but we can
11928 easily get infinite recursion. So assume it's pointed
11929 to elsewhere in the tree. Also, ignore things that
11930 aren't fields. */
11931 if (TREE_CODE (field) != FIELD_DECL)
11932 continue;
11933
11934 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11935 WALK_SUBTREE (DECL_SIZE (field));
11936 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11937 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11938 WALK_SUBTREE (DECL_QUALIFIER (field));
11939 }
11940 }
11941
11942 /* Same for scalar types. */
11943 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11944 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11945 || TREE_CODE (*type_p) == INTEGER_TYPE
11946 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11947 || TREE_CODE (*type_p) == REAL_TYPE)
11948 {
11949 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11950 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11951 }
11952
11953 WALK_SUBTREE (TYPE_SIZE (*type_p));
11954 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11955 }
11956 /* FALLTHRU */
11957
11958 default:
11959 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11960 {
11961 int i, len;
11962
11963 /* Walk over all the sub-trees of this operand. */
11964 len = TREE_OPERAND_LENGTH (*tp);
11965
11966 /* Go through the subtrees. We need to do this in forward order so
11967 that the scope of a FOR_EXPR is handled properly. */
11968 if (len)
11969 {
11970 for (i = 0; i < len - 1; ++i)
11971 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11972 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11973 }
11974 }
11975 /* If this is a type, walk the needed fields in the type. */
11976 else if (TYPE_P (*tp))
11977 return walk_type_fields (*tp, func, data, pset, lh);
11978 break;
11979 }
11980
11981 /* We didn't find what we were looking for. */
11982 return NULL_TREE;
11983
11984 #undef WALK_SUBTREE_TAIL
11985 }
11986 #undef WALK_SUBTREE
11987
11988 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11989
11990 tree
11991 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11992 walk_tree_lh lh)
11993 {
11994 tree result;
11995
11996 hash_set<tree> pset;
11997 result = walk_tree_1 (tp, func, data, &pset, lh);
11998 return result;
11999 }
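
/* Usage sketch (illustrative only): count the CALL_EXPR nodes reachable
   from *BODY_P (assumed to be a tree * in scope) with the walk_tree
   macro from tree.h, which calls walk_tree_1 with a NULL pset and
   language hook.  The callback has the walk_tree_fn signature; returning
   NULL_TREE means "keep walking".

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree (body_p, count_calls_r, &n, NULL);  */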
12000
12001
12002 tree
12003 tree_block (tree t)
12004 {
12005 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12006
12007 if (IS_EXPR_CODE_CLASS (c))
12008 return LOCATION_BLOCK (t->exp.locus);
12009 gcc_unreachable ();
12010 return NULL;
12011 }
12012
12013 void
12014 tree_set_block (tree t, tree b)
12015 {
12016 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12017
12018 if (IS_EXPR_CODE_CLASS (c))
12019 {
12020 t->exp.locus = set_block (t->exp.locus, b);
12021 }
12022 else
12023 gcc_unreachable ();
12024 }
12025
12026 /* Create a nameless artificial label and put it in the current
12027 function context. The label has a location of LOC. Returns the
12028 newly created label. */
12029
12030 tree
12031 create_artificial_label (location_t loc)
12032 {
12033 tree lab = build_decl (loc,
12034 LABEL_DECL, NULL_TREE, void_type_node);
12035
12036 DECL_ARTIFICIAL (lab) = 1;
12037 DECL_IGNORED_P (lab) = 1;
12038 DECL_CONTEXT (lab) = current_function_decl;
12039 return lab;
12040 }
12041
12042 /* Given a tree, try to return a useful variable name that we can use
12043 to prefix a temporary that is being assigned the value of the tree.
12044 I.e., given <temp> = &A, return A. */
12045
12046 const char *
12047 get_name (tree t)
12048 {
12049 tree stripped_decl;
12050
12051 stripped_decl = t;
12052 STRIP_NOPS (stripped_decl);
12053 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12054 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12055 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12056 {
12057 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12058 if (!name)
12059 return NULL;
12060 return IDENTIFIER_POINTER (name);
12061 }
12062 else
12063 {
12064 switch (TREE_CODE (stripped_decl))
12065 {
12066 case ADDR_EXPR:
12067 return get_name (TREE_OPERAND (stripped_decl, 0));
12068 default:
12069 return NULL;
12070 }
12071 }
12072 }
12073
12074 /* Return true if the function type FNTYPE has a variable argument list. */
12075
12076 bool
12077 stdarg_p (const_tree fntype)
12078 {
12079 function_args_iterator args_iter;
12080 tree n = NULL_TREE, t;
12081
12082 if (!fntype)
12083 return false;
12084
12085 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12086 {
12087 n = t;
12088 }
12089
12090 return n != NULL_TREE && n != void_type_node;
12091 }
12092
12093 /* Return true if the function type FNTYPE has a prototype. */
12094
12095 bool
12096 prototype_p (const_tree fntype)
12097 {
12098 tree t;
12099
12100 gcc_assert (fntype != NULL_TREE);
12101
12102 t = TYPE_ARG_TYPES (fntype);
12103 return (t != NULL_TREE);
12104 }
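
/* Usage sketch (illustrative only): given a FUNCTION_DECL FNDECL, both
   predicates operate on its type:

     tree fntype = TREE_TYPE (fndecl);
     bool variadic = stdarg_p (fntype);      -- true for printf-like types
     bool prototyped = prototype_p (fntype); -- false for "int f ();" in C

   A variadic prototype ends without a trailing void_type_node in
   TYPE_ARG_TYPES, which is exactly what stdarg_p checks.  */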
12105
12106 /* If BLOCK is inlined from an __attribute__((__artificial__))
12107 routine, return a pointer to the location from which it has been
12108 called. */
12109 location_t *
12110 block_nonartificial_location (tree block)
12111 {
12112 location_t *ret = NULL;
12113
12114 while (block && TREE_CODE (block) == BLOCK
12115 && BLOCK_ABSTRACT_ORIGIN (block))
12116 {
12117 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12118
12119 while (TREE_CODE (ao) == BLOCK
12120 && BLOCK_ABSTRACT_ORIGIN (ao)
12121 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12122 ao = BLOCK_ABSTRACT_ORIGIN (ao);
12123
12124 if (TREE_CODE (ao) == FUNCTION_DECL)
12125 {
12126 /* If AO is an artificial inline, point RET to the
12127 call site locus at which it has been inlined and continue
12128 the loop, in case AO's caller is also an artificial
12129 inline. */
12130 if (DECL_DECLARED_INLINE_P (ao)
12131 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12132 ret = &BLOCK_SOURCE_LOCATION (block);
12133 else
12134 break;
12135 }
12136 else if (TREE_CODE (ao) != BLOCK)
12137 break;
12138
12139 block = BLOCK_SUPERCONTEXT (block);
12140 }
12141 return ret;
12142 }
12143
12144
12145 /* If EXP is inlined from an __attribute__((__artificial__))
12146 function, return the location of the original call expression. */
12147
12148 location_t
12149 tree_nonartificial_location (tree exp)
12150 {
12151 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12152
12153 if (loc)
12154 return *loc;
12155 else
12156 return EXPR_LOCATION (exp);
12157 }
12158
12159
12160 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12161 and TARGET_OPTION_NODE nodes. */
12162
12163 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12164
12165 hashval_t
12166 cl_option_hasher::hash (tree x)
12167 {
12168 const_tree const t = x;
12169 const char *p;
12170 size_t i;
12171 size_t len = 0;
12172 hashval_t hash = 0;
12173
12174 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12175 {
12176 p = (const char *)TREE_OPTIMIZATION (t);
12177 len = sizeof (struct cl_optimization);
12178 }
12179
12180 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12181 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12182
12183 else
12184 gcc_unreachable ();
12185
12186 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12187 something else. */
12188 for (i = 0; i < len; i++)
12189 if (p[i])
12190 hash = (hash << 4) ^ ((i << 2) | p[i]);
12191
12192 return hash;
12193 }
12194
12195 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12196 TARGET_OPTION tree node) is the same as that given by *Y, which is a
12197 node of the same kind. */
12198
12199 bool
12200 cl_option_hasher::equal (tree x, tree y)
12201 {
12202 const_tree const xt = x;
12203 const_tree const yt = y;
12204 const char *xp;
12205 const char *yp;
12206 size_t len;
12207
12208 if (TREE_CODE (xt) != TREE_CODE (yt))
12209 return 0;
12210
12211 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12212 {
12213 xp = (const char *)TREE_OPTIMIZATION (xt);
12214 yp = (const char *)TREE_OPTIMIZATION (yt);
12215 len = sizeof (struct cl_optimization);
12216 }
12217
12218 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12219 {
12220 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12221 TREE_TARGET_OPTION (yt));
12222 }
12223
12224 else
12225 gcc_unreachable ();
12226
12227 return (memcmp (xp, yp, len) == 0);
12228 }
12229
12230 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12231
12232 tree
12233 build_optimization_node (struct gcc_options *opts)
12234 {
12235 tree t;
12236
12237 /* Use the cache of optimization nodes. */
12238
12239 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12240 opts);
12241
12242 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12243 t = *slot;
12244 if (!t)
12245 {
12246 /* Insert this one into the hash table. */
12247 t = cl_optimization_node;
12248 *slot = t;
12249
12250 /* Make a new node for next time round. */
12251 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12252 }
12253
12254 return t;
12255 }
12256
12257 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12258
12259 tree
12260 build_target_option_node (struct gcc_options *opts)
12261 {
12262 tree t;
12263
12264 /* Use the cache of target option nodes. */
12265
12266 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12267 opts);
12268
12269 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12270 t = *slot;
12271 if (!t)
12272 {
12273 /* Insert this one into the hash table. */
12274 t = cl_target_option_node;
12275 *slot = t;
12276
12277 /* Make a new node for next time round. */
12278 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12279 }
12280
12281 return t;
12282 }
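
/* Usage sketch (illustrative only): both builders are normally given the
   global option state, e.g. while processing optimize or target
   attributes:

     tree opt_node = build_optimization_node (&global_options);
     tree tgt_node = build_target_option_node (&global_options);

   Identical option sets map to the same cached node, so the results can
   be compared with pointer equality.  */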
12283
12284 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12285 so that they aren't saved during PCH writing. */
12286
12287 void
12288 prepare_target_option_nodes_for_pch (void)
12289 {
12290 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12291 for (; iter != cl_option_hash_table->end (); ++iter)
12292 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12293 TREE_TARGET_GLOBALS (*iter) = NULL;
12294 }
12295
12296 /* Determine the "ultimate origin" of a block. The block may be an inlined
12297 instance of an inlined instance of a block which is local to an inline
12298 function, so we have to trace all of the way back through the origin chain
12299 to find out what sort of node actually served as the original seed for the
12300 given block. */
12301
12302 tree
12303 block_ultimate_origin (const_tree block)
12304 {
12305 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12306
12307 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12308 we're trying to output the abstract instance of this function. */
12309 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12310 return NULL_TREE;
12311
12312 if (immediate_origin == NULL_TREE)
12313 return NULL_TREE;
12314 else
12315 {
12316 tree ret_val;
12317 tree lookahead = immediate_origin;
12318
12319 do
12320 {
12321 ret_val = lookahead;
12322 lookahead = (TREE_CODE (ret_val) == BLOCK
12323 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12324 }
12325 while (lookahead != NULL && lookahead != ret_val);
12326
12327 /* The block's abstract origin chain may not be the *ultimate* origin of
12328 the block. It could lead to a DECL that has an abstract origin set.
12329 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12330 will give us if it has one). Note that DECL's abstract origins are
12331 supposed to be the most distant ancestor (or so decl_ultimate_origin
12332 claims), so we don't need to loop following the DECL origins. */
12333 if (DECL_P (ret_val))
12334 return DECL_ORIGIN (ret_val);
12335
12336 return ret_val;
12337 }
12338 }
12339
12340 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12341 no instruction. */
12342
12343 bool
12344 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12345 {
12346 /* Do not strip casts into or out of differing address spaces. */
12347 if (POINTER_TYPE_P (outer_type)
12348 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12349 {
12350 if (!POINTER_TYPE_P (inner_type)
12351 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12352 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12353 return false;
12354 }
12355 else if (POINTER_TYPE_P (inner_type)
12356 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12357 {
12358 /* We already know that outer_type is not a pointer with
12359 a non-generic address space. */
12360 return false;
12361 }
12362
12363 /* Use precision rather than machine mode when we can, which gives
12364 the correct answer even for submode (bit-field) types. */
12365 if ((INTEGRAL_TYPE_P (outer_type)
12366 || POINTER_TYPE_P (outer_type)
12367 || TREE_CODE (outer_type) == OFFSET_TYPE)
12368 && (INTEGRAL_TYPE_P (inner_type)
12369 || POINTER_TYPE_P (inner_type)
12370 || TREE_CODE (inner_type) == OFFSET_TYPE))
12371 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12372
12373 /* Otherwise fall back on comparing machine modes (e.g. for
12374 aggregate types, floats). */
12375 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12376 }
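
/* For example (illustrative only), on a target where long and long long
   are both 64 bits wide,

     tree_nop_conversion_p (long_long_integer_type_node,
                            long_integer_type_node)

   is true because only the precision is compared, whereas a conversion
   between a 32-bit int and a 64-bit long is never a no-op.  */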
12377
12378 /* Return true iff conversion in EXP generates no instruction. Mark
12379 it inline so that we fully inline into the stripping functions even
12380 though we have two uses of this function. */
12381
12382 static inline bool
12383 tree_nop_conversion (const_tree exp)
12384 {
12385 tree outer_type, inner_type;
12386
12387 if (!CONVERT_EXPR_P (exp)
12388 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12389 return false;
12390 if (TREE_OPERAND (exp, 0) == error_mark_node)
12391 return false;
12392
12393 outer_type = TREE_TYPE (exp);
12394 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12395
12396 if (!inner_type)
12397 return false;
12398
12399 return tree_nop_conversion_p (outer_type, inner_type);
12400 }
12401
12402 /* Return true iff conversion in EXP generates no instruction. Don't
12403 consider conversions changing the signedness. */
12404
12405 static bool
12406 tree_sign_nop_conversion (const_tree exp)
12407 {
12408 tree outer_type, inner_type;
12409
12410 if (!tree_nop_conversion (exp))
12411 return false;
12412
12413 outer_type = TREE_TYPE (exp);
12414 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12415
12416 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12417 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12418 }
12419
12420 /* Strip conversions from EXP according to tree_nop_conversion and
12421 return the resulting expression. */
12422
12423 tree
12424 tree_strip_nop_conversions (tree exp)
12425 {
12426 while (tree_nop_conversion (exp))
12427 exp = TREE_OPERAND (exp, 0);
12428 return exp;
12429 }
12430
12431 /* Strip conversions from EXP according to tree_sign_nop_conversion
12432 and return the resulting expression. */
12433
12434 tree
12435 tree_strip_sign_nop_conversions (tree exp)
12436 {
12437 while (tree_sign_nop_conversion (exp))
12438 exp = TREE_OPERAND (exp, 0);
12439 return exp;
12440 }
12441
12442 /* Strip any floating point extensions from EXP and return the result. */
12443 tree
12444 strip_float_extensions (tree exp)
12445 {
12446 tree sub, expt, subt;
12447
12448 /* For a floating point constant, look up the narrowest type that can hold
12449 it properly and handle it like (type)(narrowest_type)constant.
12450 This way we can optimize, for instance, a=a*2.0 where "a" is float
12451 but 2.0 is a double constant. */
12452 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12453 {
12454 REAL_VALUE_TYPE orig;
12455 tree type = NULL;
12456
12457 orig = TREE_REAL_CST (exp);
12458 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12459 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12460 type = float_type_node;
12461 else if (TYPE_PRECISION (TREE_TYPE (exp))
12462 > TYPE_PRECISION (double_type_node)
12463 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12464 type = double_type_node;
12465 if (type)
12466 return build_real_truncate (type, orig);
12467 }
12468
12469 if (!CONVERT_EXPR_P (exp))
12470 return exp;
12471
12472 sub = TREE_OPERAND (exp, 0);
12473 subt = TREE_TYPE (sub);
12474 expt = TREE_TYPE (exp);
12475
12476 if (!FLOAT_TYPE_P (subt))
12477 return exp;
12478
12479 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12480 return exp;
12481
12482 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12483 return exp;
12484
12485 return strip_float_extensions (sub);
12486 }
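
/* Worked example (illustrative only): for the expression (double) f,
   where F is a variable of type float, strip_float_extensions returns F
   itself; for a REAL_CST 2.0 of type double it returns the float
   constant 2.0f, because the value truncates to float exactly.  */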
12487
12488 /* Strip out all handled components that produce invariant
12489 offsets. */
12490
12491 const_tree
12492 strip_invariant_refs (const_tree op)
12493 {
12494 while (handled_component_p (op))
12495 {
12496 switch (TREE_CODE (op))
12497 {
12498 case ARRAY_REF:
12499 case ARRAY_RANGE_REF:
12500 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12501 || TREE_OPERAND (op, 2) != NULL_TREE
12502 || TREE_OPERAND (op, 3) != NULL_TREE)
12503 return NULL;
12504 break;
12505
12506 case COMPONENT_REF:
12507 if (TREE_OPERAND (op, 2) != NULL_TREE)
12508 return NULL;
12509 break;
12510
12511 default:;
12512 }
12513 op = TREE_OPERAND (op, 0);
12514 }
12515
12516 return op;
12517 }
12518
12519 static GTY(()) tree gcc_eh_personality_decl;
12520
12521 /* Return the GCC personality function decl. */
12522
12523 tree
12524 lhd_gcc_personality (void)
12525 {
12526 if (!gcc_eh_personality_decl)
12527 gcc_eh_personality_decl = build_personality_function ("gcc");
12528 return gcc_eh_personality_decl;
12529 }
12530
12531 /* TARGET is the call target of a GIMPLE call statement
12532 (obtained by gimple_call_fn). Return true if it is an
12533 OBJ_TYPE_REF representing a virtual call to a C++ method
12534 (as opposed to an OBJ_TYPE_REF representing Objective-C calls
12535 through a cast, where the middle-end devirtualization machinery
12536 can't apply). */
12537
12538 bool
12539 virtual_method_call_p (const_tree target)
12540 {
12541 if (TREE_CODE (target) != OBJ_TYPE_REF)
12542 return false;
12543 tree t = TREE_TYPE (target);
12544 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12545 t = TREE_TYPE (t);
12546 if (TREE_CODE (t) == FUNCTION_TYPE)
12547 return false;
12548 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12549 /* If we do not have BINFO associated, it means that type was built
12550 without devirtualization enabled. Do not consider this a virtual
12551 call. */
12552 if (!TYPE_BINFO (obj_type_ref_class (target)))
12553 return false;
12554 return true;
12555 }
12556
12557 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12558
12559 tree
12560 obj_type_ref_class (const_tree ref)
12561 {
12562 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12563 ref = TREE_TYPE (ref);
12564 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12565 ref = TREE_TYPE (ref);
12566 /* We look for the type THIS points to. ObjC also builds
12567 OBJ_TYPE_REF for non-method calls; their first parameter
12568 ID however also corresponds to the class type. */
12569 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12570 || TREE_CODE (ref) == FUNCTION_TYPE);
12571 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12572 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12573 return TREE_TYPE (ref);
12574 }
12575
12576 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12577
12578 static tree
12579 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12580 {
12581 unsigned int i;
12582 tree base_binfo, b;
12583
12584 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12585 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12586 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12587 return base_binfo;
12588 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12589 return b;
12590 return NULL;
12591 }
12592
12593 /* Try to find a base info of BINFO that would have its field decl at offset
12594 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12595 found, return it; otherwise return NULL_TREE. */
12596
12597 tree
12598 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12599 {
12600 tree type = BINFO_TYPE (binfo);
12601
12602 while (true)
12603 {
12604 HOST_WIDE_INT pos, size;
12605 tree fld;
12606 int i;
12607
12608 if (types_same_for_odr (type, expected_type))
12609 return binfo;
12610 if (offset < 0)
12611 return NULL_TREE;
12612
12613 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12614 {
12615 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12616 continue;
12617
12618 pos = int_bit_position (fld);
12619 size = tree_to_uhwi (DECL_SIZE (fld));
12620 if (pos <= offset && (pos + size) > offset)
12621 break;
12622 }
12623 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12624 return NULL_TREE;
12625
12626 /* Offset 0 indicates the primary base, whose vtable contents are
12627 represented in the binfo for the derived class. */
12628 else if (offset != 0)
12629 {
12630 tree found_binfo = NULL, base_binfo;
12631 /* Offsets in BINFO are in bytes relative to the whole structure
12632 while POS is in bits relative to the containing field. */
12633 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12634 / BITS_PER_UNIT);
12635
12636 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12637 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12638 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12639 {
12640 found_binfo = base_binfo;
12641 break;
12642 }
12643 if (found_binfo)
12644 binfo = found_binfo;
12645 else
12646 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12647 binfo_offset);
12648 }
12649
12650 type = TREE_TYPE (fld);
12651 offset -= pos;
12652 }
12653 }
12654
12655 /* Returns true if X is a typedef decl. */
12656
12657 bool
12658 is_typedef_decl (const_tree x)
12659 {
12660 return (x && TREE_CODE (x) == TYPE_DECL
12661 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12662 }
12663
12664 /* Returns true iff TYPE is a type variant created for a typedef. */
12665
12666 bool
12667 typedef_variant_p (const_tree type)
12668 {
12669 return is_typedef_decl (TYPE_NAME (type));
12670 }
12671
12672 /* Warn about a use of an identifier which was marked deprecated. */
12673 void
12674 warn_deprecated_use (tree node, tree attr)
12675 {
12676 const char *msg;
12677
12678 if (node == 0 || !warn_deprecated_decl)
12679 return;
12680
12681 if (!attr)
12682 {
12683 if (DECL_P (node))
12684 attr = DECL_ATTRIBUTES (node);
12685 else if (TYPE_P (node))
12686 {
12687 tree decl = TYPE_STUB_DECL (node);
12688 if (decl)
12689 attr = lookup_attribute ("deprecated",
12690 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12691 }
12692 }
12693
12694 if (attr)
12695 attr = lookup_attribute ("deprecated", attr);
12696
12697 if (attr)
12698 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12699 else
12700 msg = NULL;
12701
12702 bool w;
12703 if (DECL_P (node))
12704 {
12705 if (msg)
12706 w = warning (OPT_Wdeprecated_declarations,
12707 "%qD is deprecated: %s", node, msg);
12708 else
12709 w = warning (OPT_Wdeprecated_declarations,
12710 "%qD is deprecated", node);
12711 if (w)
12712 inform (DECL_SOURCE_LOCATION (node), "declared here");
12713 }
12714 else if (TYPE_P (node))
12715 {
12716 tree what = NULL_TREE;
12717 tree decl = TYPE_STUB_DECL (node);
12718
12719 if (TYPE_NAME (node))
12720 {
12721 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12722 what = TYPE_NAME (node);
12723 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12724 && DECL_NAME (TYPE_NAME (node)))
12725 what = DECL_NAME (TYPE_NAME (node));
12726 }
12727
12728 if (decl)
12729 {
12730 if (what)
12731 {
12732 if (msg)
12733 w = warning (OPT_Wdeprecated_declarations,
12734 "%qE is deprecated: %s", what, msg);
12735 else
12736 w = warning (OPT_Wdeprecated_declarations,
12737 "%qE is deprecated", what);
12738 }
12739 else
12740 {
12741 if (msg)
12742 w = warning (OPT_Wdeprecated_declarations,
12743 "type is deprecated: %s", msg);
12744 else
12745 w = warning (OPT_Wdeprecated_declarations,
12746 "type is deprecated");
12747 }
12748 if (w)
12749 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12750 }
12751 else
12752 {
12753 if (what)
12754 {
12755 if (msg)
12756 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12757 what, msg);
12758 else
12759 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12760 }
12761 else
12762 {
12763 if (msg)
12764 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12765 msg);
12766 else
12767 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12768 }
12769 }
12770 }
12771 }
12772
12773 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12774 somewhere in it. */
12775
12776 bool
12777 contains_bitfld_component_ref_p (const_tree ref)
12778 {
12779 while (handled_component_p (ref))
12780 {
12781 if (TREE_CODE (ref) == COMPONENT_REF
12782 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12783 return true;
12784 ref = TREE_OPERAND (ref, 0);
12785 }
12786
12787 return false;
12788 }
12789
12790 /* Try to determine whether a TRY_CATCH expression can fall through.
12791 This is a subroutine of block_may_fallthru. */
12792
12793 static bool
12794 try_catch_may_fallthru (const_tree stmt)
12795 {
12796 tree_stmt_iterator i;
12797
12798 /* If the TRY block can fall through, the whole TRY_CATCH can
12799 fall through. */
12800 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12801 return true;
12802
12803 i = tsi_start (TREE_OPERAND (stmt, 1));
12804 switch (TREE_CODE (tsi_stmt (i)))
12805 {
12806 case CATCH_EXPR:
12807 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12808 catch expression and a body. The whole TRY_CATCH may fall
12809 through iff any of the catch bodies falls through. */
12810 for (; !tsi_end_p (i); tsi_next (&i))
12811 {
12812 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12813 return true;
12814 }
12815 return false;
12816
12817 case EH_FILTER_EXPR:
12818 /* The exception filter expression only matters if there is an
12819 exception. If the exception does not match EH_FILTER_TYPES,
12820 we will execute EH_FILTER_FAILURE, and we will fall through
12821 if that falls through. If the exception does match
12822 EH_FILTER_TYPES, the stack unwinder will continue up the
12823 stack, so we will not fall through. We don't know whether we
12824 will throw an exception which matches EH_FILTER_TYPES or not,
12825 so we just ignore EH_FILTER_TYPES and assume that we might
12826 throw an exception which doesn't match. */
12827 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12828
12829 default:
12830 /* This case represents statements to be executed when an
12831 exception occurs. Those statements are implicitly followed
12832 by a RESX statement to resume execution after the exception.
12833 So in this case the TRY_CATCH never falls through. */
12834 return false;
12835 }
12836 }
12837
12838 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12839 need not be 100% accurate; simply be conservative and return true if we
12840 don't know. This is used only to avoid stupidly generating extra code.
12841 If we're wrong, we'll just delete the extra code later. */
12842
12843 bool
12844 block_may_fallthru (const_tree block)
12845 {
12846 /* This CONST_CAST is okay because expr_last returns its argument
12847 unmodified and we assign it to a const_tree. */
12848 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12849
12850 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12851 {
12852 case GOTO_EXPR:
12853 case RETURN_EXPR:
12854 /* Easy cases. If the last statement of the block implies
12855 control transfer, then we can't fall through. */
12856 return false;
12857
12858 case SWITCH_EXPR:
12859 /* If SWITCH_LABELS is set, this is lowered, and represents a
12860 branch to a selected label and hence can not fall through.
12861 Otherwise SWITCH_BODY is set, and the switch can fall
12862 through. */
12863 return SWITCH_LABELS (stmt) == NULL_TREE;
12864
12865 case COND_EXPR:
12866 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12867 return true;
12868 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12869
12870 case BIND_EXPR:
12871 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12872
12873 case TRY_CATCH_EXPR:
12874 return try_catch_may_fallthru (stmt);
12875
12876 case TRY_FINALLY_EXPR:
12877 /* The finally clause is always executed after the try clause,
12878 so if it does not fall through, then the try-finally will not
12879 fall through. Otherwise, if the try clause does not fall
12880 through, then when the finally clause falls through it will
12881 resume execution wherever the try clause was going. So the
12882 whole try-finally will only fall through if both the try
12883 clause and the finally clause fall through. */
12884 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12885 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12886
12887 case MODIFY_EXPR:
12888 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12889 stmt = TREE_OPERAND (stmt, 1);
12890 else
12891 return true;
12892 /* FALLTHRU */
12893
12894 case CALL_EXPR:
12895 /* Functions that do not return do not fall through. */
12896 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12897
12898 case CLEANUP_POINT_EXPR:
12899 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12900
12901 case TARGET_EXPR:
12902 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12903
12904 case ERROR_MARK:
12905 return true;
12906
12907 default:
12908 return lang_hooks.block_may_fallthru (stmt);
12909 }
12910 }
12911
12912 /* True if we are using EH to handle cleanups. */
12913 static bool using_eh_for_cleanups_flag = false;
12914
12915 /* This routine is called from front ends to indicate eh should be used for
12916 cleanups. */
12917 void
12918 using_eh_for_cleanups (void)
12919 {
12920 using_eh_for_cleanups_flag = true;
12921 }
12922
12923 /* Query whether EH is used for cleanups. */
12924 bool
12925 using_eh_for_cleanups_p (void)
12926 {
12927 return using_eh_for_cleanups_flag;
12928 }
12929
12930 /* Wrapper for tree_code_name to ensure that tree code is valid */
12931 const char *
12932 get_tree_code_name (enum tree_code code)
12933 {
12934 const char *invalid = "<invalid tree code>";
12935
12936 if (code >= MAX_TREE_CODES)
12937 return invalid;
12938
12939 return tree_code_name[code];
12940 }
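
/* For example, get_tree_code_name (PLUS_EXPR) returns "plus_expr" and
   get_tree_code_name (INTEGER_CST) returns "integer_cst"; an
   out-of-range code yields "<invalid tree code>".  */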
12941
12942 /* Drops the TREE_OVERFLOW flag from T. */
12943
12944 tree
12945 drop_tree_overflow (tree t)
12946 {
12947 gcc_checking_assert (TREE_OVERFLOW (t));
12948
12949 /* For tree codes with a sharing machinery re-build the result. */
12950 if (TREE_CODE (t) == INTEGER_CST)
12951 return wide_int_to_tree (TREE_TYPE (t), t);
12952
12953 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12954 and drop the flag. */
12955 t = copy_node (t);
12956 TREE_OVERFLOW (t) = 0;
12957 return t;
12958 }
12959
12960 /* Given a memory reference expression T, return its base address.
12961 The base address of a memory reference expression is the main
12962 object being referenced. For instance, the base address for
12963 'array[i].fld[j]' is 'array'. You can think of this as stripping
12964 away the offset part from a memory address.
12965
12966 This function calls handled_component_p to strip away all the inner
12967 parts of the memory reference until it reaches the base object. */
12968
12969 tree
12970 get_base_address (tree t)
12971 {
12972 while (handled_component_p (t))
12973 t = TREE_OPERAND (t, 0);
12974
12975 if ((TREE_CODE (t) == MEM_REF
12976 || TREE_CODE (t) == TARGET_MEM_REF)
12977 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12978 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12979
12980 /* ??? Either the alias oracle or all callers need to properly deal
12981 with WITH_SIZE_EXPRs before we can look through those. */
12982 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12983 return NULL_TREE;
12984
12985 return t;
12986 }
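
/* Worked example (illustrative only): for the reference a.b[i].c the
   base address is the declaration A; for MEM_REF[&a, 8] it is likewise
   A, because the taken address is looked through; for *p, where P is a
   pointer SSA name, it is the MEM_REF itself.  */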
12987
12988 /* Return a tree of sizetype representing the size, in bytes, of the element
12989 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12990
12991 tree
12992 array_ref_element_size (tree exp)
12993 {
12994 tree aligned_size = TREE_OPERAND (exp, 3);
12995 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12996 location_t loc = EXPR_LOCATION (exp);
12997
12998 /* If a size was specified in the ARRAY_REF, it's the size measured
12999 in alignment units of the element type. So multiply by that value. */
13000 if (aligned_size)
13001 {
13002 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13003 sizetype from another type of the same width and signedness. */
13004 if (TREE_TYPE (aligned_size) != sizetype)
13005 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13006 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13007 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13008 }
13009
13010 /* Otherwise, take the size from that of the element type. Substitute
13011 any PLACEHOLDER_EXPR that we have. */
13012 else
13013 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13014 }
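/* Editorial example (hedged sketch, not GCC code): when operand 3 of the
   ARRAY_REF is present, it stores the element size measured in units of the
   element type's alignment, so the byte size is that value multiplied by the
   alignment in bytes.  The numbers below are made up purely for illustration. */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Suppose the element type is 8-byte aligned and the ARRAY_REF records an
     aligned size of 3 units.  */
  unsigned long align_unit = 8;   /* alignment of the element type, in bytes */
  unsigned long aligned_size = 3; /* operand 3 of the ARRAY_REF, in align units */
  unsigned long size_in_bytes = aligned_size * align_unit;
  printf ("element size: %lu bytes\n", size_in_bytes);  /* prints 24 */
  return 0;
}
#endif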
13015
13016 /* Return a tree representing the lower bound of the array mentioned in
13017 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13018
13019 tree
13020 array_ref_low_bound (tree exp)
13021 {
13022 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13023
13024 /* If a lower bound is specified in EXP, use it. */
13025 if (TREE_OPERAND (exp, 2))
13026 return TREE_OPERAND (exp, 2);
13027
13028 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13029 substituting for a PLACEHOLDER_EXPR as needed. */
13030 if (domain_type && TYPE_MIN_VALUE (domain_type))
13031 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13032
13033 /* Otherwise, return a zero of the appropriate type. */
13034 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13035 }
13036
13037 /* Return a tree representing the upper bound of the array mentioned in
13038 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13039
13040 tree
13041 array_ref_up_bound (tree exp)
13042 {
13043 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13044
13045 /* If there is a domain type and it has an upper bound, use it, substituting
13046 for a PLACEHOLDER_EXPR as needed. */
13047 if (domain_type && TYPE_MAX_VALUE (domain_type))
13048 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13049
13050 /* Otherwise fail. */
13051 return NULL_TREE;
13052 }
13053
13054 /* Returns true if REF is an array reference to an array at the end of
13055 a structure. If this is the case, the array may be allocated larger
13056 than its upper bound implies. */
13057
13058 bool
13059 array_at_struct_end_p (tree ref)
13060 {
13061 if (TREE_CODE (ref) != ARRAY_REF
13062 && TREE_CODE (ref) != ARRAY_RANGE_REF)
13063 return false;
13064
13065 while (handled_component_p (ref))
13066 {
13067 /* If the reference chain contains a component reference to a
13068 non-union type and another field follows it, the reference
13069 is not at the end of a structure. */
13070 if (TREE_CODE (ref) == COMPONENT_REF
13071 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13072 {
13073 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13074 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13075 nextf = DECL_CHAIN (nextf);
13076 if (nextf)
13077 return false;
13078 }
13079
13080 ref = TREE_OPERAND (ref, 0);
13081 }
13082
13083 tree size = NULL;
13084
13085 if (TREE_CODE (ref) == MEM_REF
13086 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13087 {
13088 size = TYPE_SIZE (TREE_TYPE (ref));
13089 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13090 }
13091
13092 /* If the reference is based on a declared entity, the size of the array
13093 is constrained by its given domain. (Do not trust commons; PR 69368.) */
13094 if (DECL_P (ref)
13095 /* Be sure the size of the MEM_REF target matches. For example:
13096
13097 char buf[10];
13098 struct foo *str = (struct foo *)&buf;
13099
13100 str->trailing_array[2] = 1;
13101
13102 is valid because BUF allocates enough space. */
13103
13104 && (!size || (DECL_SIZE (ref) != NULL
13105 && operand_equal_p (DECL_SIZE (ref), size, 0)))
13106 && !(flag_unconstrained_commons
13107 && TREE_CODE (ref) == VAR_DECL && DECL_COMMON (ref)))
13108 return false;
13109
13110 return true;
13111 }
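/* Editorial example (sketch, not GCC code): the pattern this predicate cares
   about is a trailing array that is deliberately over-allocated, so its
   declared bound cannot be trusted for upper-bound reasoning.  The struct and
   helper below are invented for illustration.  */
#if 0
#include <stdlib.h>
#include <string.h>
#include <stddef.h>

struct packet
{
  int len;
  char payload[1];   /* Trailing array; the real storage follows the struct.  */
};

static struct packet *
make_packet (const char *data, int len)
{
  /* Over-allocate so indices past payload[0] are backed by real storage.  */
  struct packet *p = malloc (offsetof (struct packet, payload) + len);
  if (!p)
    return NULL;
  p->len = len;
  memcpy (p->payload, data, len);  /* Accesses past payload[0] are intended.  */
  return p;
}

int
main (void)
{
  struct packet *p = make_packet ("hi", 3);
  free (p);
  return 0;
}
#endif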
13112
13113 /* Return a tree representing the offset, in bytes, of the field referenced
13114 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13115
13116 tree
13117 component_ref_field_offset (tree exp)
13118 {
13119 tree aligned_offset = TREE_OPERAND (exp, 2);
13120 tree field = TREE_OPERAND (exp, 1);
13121 location_t loc = EXPR_LOCATION (exp);
13122
13123 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13124 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13125 value. */
13126 if (aligned_offset)
13127 {
13128 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13129 sizetype from another type of the same width and signedness. */
13130 if (TREE_TYPE (aligned_offset) != sizetype)
13131 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13132 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13133 size_int (DECL_OFFSET_ALIGN (field)
13134 / BITS_PER_UNIT));
13135 }
13136
13137 /* Otherwise, take the offset from that of the field. Substitute
13138 any PLACEHOLDER_EXPR that we have. */
13139 else
13140 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13141 }
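/* Editorial example (sketch, not GCC code): the byte offset returned by
   component_ref_field_offset excludes DECL_FIELD_BIT_OFFSET; for an ordinary,
   byte-aligned field the combination of the two corresponds to what offsetof
   reports.  The struct below is invented for illustration.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct demo
{
  char c;
  int i;   /* Typically at byte offset 4 because of int alignment.  */
};

int
main (void)
{
  printf ("offset of i: %zu bytes\n", offsetof (struct demo, i));
  return 0;
}
#endif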
13142
13143 /* Return the machine mode of T. For vectors, returns the mode of the
13144 inner type. The main use case is to feed the result to HONOR_NANS,
13145 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13146
13147 machine_mode
13148 element_mode (const_tree t)
13149 {
13150 if (!TYPE_P (t))
13151 t = TREE_TYPE (t);
13152 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13153 t = TREE_TYPE (t);
13154 return TYPE_MODE (t);
13155 }
13156
13157
13158 /* Verify that the basic properties of T match TV and thus T can be a variant
13159 of TV. TV should be the more specified variant (i.e. the main variant). */
13160
13161 static bool
13162 verify_type_variant (const_tree t, tree tv)
13163 {
13164 /* A type variant can differ by:
13165
13166 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13167 ENCODE_QUAL_ADDR_SPACE.
13168 - the main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13169 in this case some values may not be set in the variant types
13170 (see the TYPE_COMPLETE_P checks).
13171 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13172 - TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13173 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13174 - the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13175 - during LTO, TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
13176 this is necessary to make it possible to merge types from different TUs
13177 - arrays, pointers and references may have a TREE_TYPE that is a variant
13178 of the TREE_TYPE of their main variants.
13179 - aggregates may have a new TYPE_FIELDS list that lists variants of
13180 the main variant's TYPE_FIELDS.
13181 - vector types may differ by TYPE_VECTOR_OPAQUE
13182 - TYPE_METHODS is always NULL for variant types and is maintained for
13183 the main variant only.
13184 */
13185
13186 /* Convenience macro for matching individual fields. */
13187 #define verify_variant_match(flag) \
13188 do { \
13189 if (flag (tv) != flag (t)) \
13190 { \
13191 error ("type variant differs by " #flag "."); \
13192 debug_tree (tv); \
13193 return false; \
13194 } \
13195 } while (false)
13196
13197 /* tree_base checks. */
13198
13199 verify_variant_match (TREE_CODE);
13200 /* FIXME: Ada builds non-artificial variants of artificial types. */
13201 if (TYPE_ARTIFICIAL (tv) && 0)
13202 verify_variant_match (TYPE_ARTIFICIAL);
13203 if (POINTER_TYPE_P (tv))
13204 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13205 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13206 verify_variant_match (TYPE_UNSIGNED);
13207 verify_variant_match (TYPE_ALIGN_OK);
13208 verify_variant_match (TYPE_PACKED);
13209 if (TREE_CODE (t) == REFERENCE_TYPE)
13210 verify_variant_match (TYPE_REF_IS_RVALUE);
13211 if (AGGREGATE_TYPE_P (t))
13212 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13213 else
13214 verify_variant_match (TYPE_SATURATING);
13215 /* FIXME: This check triggers during the libstdc++ build. */
13216 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13217 verify_variant_match (TYPE_FINAL_P);
13218
13219 /* tree_type_common checks. */
13220
13221 if (COMPLETE_TYPE_P (t))
13222 {
13223 verify_variant_match (TYPE_SIZE);
13224 verify_variant_match (TYPE_MODE);
13225 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13226 /* FIXME: ideally we should compare pointer equality, but the Java FE
13227 produces variants where the size is an INTEGER_CST of a different type
13228 (int wrt size_type) during the libjava build. */
13229 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13230 {
13231 error ("type variant has different TYPE_SIZE_UNIT");
13232 debug_tree (tv);
13233 error ("type variant's TYPE_SIZE_UNIT");
13234 debug_tree (TYPE_SIZE_UNIT (tv));
13235 error ("type's TYPE_SIZE_UNIT");
13236 debug_tree (TYPE_SIZE_UNIT (t));
13237 return false;
13238 }
13239 }
13240 verify_variant_match (TYPE_PRECISION);
13241 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13242 if (RECORD_OR_UNION_TYPE_P (t))
13243 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13244 else if (TREE_CODE (t) == ARRAY_TYPE)
13245 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13246 /* During LTO we merge variant lists from different translation units
13247 that may differ by TYPE_CONTEXT, which in turn may point
13248 to a TRANSLATION_UNIT_DECL.
13249 Ada also builds variants of types with different TYPE_CONTEXT. */
13250 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13251 verify_variant_match (TYPE_CONTEXT);
13252 verify_variant_match (TYPE_STRING_FLAG);
13253 if (TYPE_ALIAS_SET_KNOWN_P (t))
13254 {
13255 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13256 debug_tree (tv);
13257 return false;
13258 }
13259
13260 /* tree_type_non_common checks. */
13261
13262 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13263 and dangles the pointer from time to time. */
13264 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13265 && (in_lto_p || !TYPE_VFIELD (tv)
13266 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13267 {
13268 error ("type variant has different TYPE_VFIELD");
13269 debug_tree (tv);
13270 return false;
13271 }
13272 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13273 || TREE_CODE (t) == INTEGER_TYPE
13274 || TREE_CODE (t) == BOOLEAN_TYPE
13275 || TREE_CODE (t) == REAL_TYPE
13276 || TREE_CODE (t) == FIXED_POINT_TYPE)
13277 {
13278 verify_variant_match (TYPE_MAX_VALUE);
13279 verify_variant_match (TYPE_MIN_VALUE);
13280 }
13281 if (TREE_CODE (t) == METHOD_TYPE)
13282 verify_variant_match (TYPE_METHOD_BASETYPE);
13283 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13284 {
13285 error ("type variant has TYPE_METHODS");
13286 debug_tree (tv);
13287 return false;
13288 }
13289 if (TREE_CODE (t) == OFFSET_TYPE)
13290 verify_variant_match (TYPE_OFFSET_BASETYPE);
13291 if (TREE_CODE (t) == ARRAY_TYPE)
13292 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13293 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13294 or even in the type's main variant. This is needed to make bootstrap pass,
13295 and the bug seems new in GCC 5.
13296 The C++ FE should be updated to make this consistent, and we should check
13297 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13298 is a match with the main variant.
13299
13300 Also disable the check for Java for now because of a parser hack that first
13301 builds a dummy BINFO and then sometimes replaces it by the real BINFO in some
13302 of the copies. */
13303 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13304 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13305 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13306 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
13307 do the checking at LTO time only. */
13308 && (in_lto_p && odr_type_p (t)))
13309 {
13310 error ("type variant has different TYPE_BINFO");
13311 debug_tree (tv);
13312 error ("type variant's TYPE_BINFO");
13313 debug_tree (TYPE_BINFO (tv));
13314 error ("type's TYPE_BINFO");
13315 debug_tree (TYPE_BINFO (t));
13316 return false;
13317 }
13318
13319 /* Check various uses of TYPE_VALUES_RAW. */
13320 if (TREE_CODE (t) == ENUMERAL_TYPE)
13321 verify_variant_match (TYPE_VALUES);
13322 else if (TREE_CODE (t) == ARRAY_TYPE)
13323 verify_variant_match (TYPE_DOMAIN);
13324 /* Permit incomplete variants of complete type. While FEs may complete
13325 all variants, this does not happen for C++ templates in all cases. */
13326 else if (RECORD_OR_UNION_TYPE_P (t)
13327 && COMPLETE_TYPE_P (t)
13328 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13329 {
13330 tree f1, f2;
13331
13332 /* Fortran builds qualified variants as new records with items of
13333 qualified type. Verify that they look the same. */
13334 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13335 f1 && f2;
13336 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13337 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13338 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13339 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13340 /* FIXME: gfc_nonrestricted_type builds all types as variants
13341 with the exception of pointer types. It deeply copies the type,
13342 which means that we may end up with a variant type
13343 referring to a non-variant pointer. We may change it to
13344 produce types as variants, too, like
13345 objc_get_protocol_qualified_type does. */
13346 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13347 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13348 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13349 break;
13350 if (f1 || f2)
13351 {
13352 error ("type variant has different TYPE_FIELDS");
13353 debug_tree (tv);
13354 error ("first mismatch is field");
13355 debug_tree (f1);
13356 error ("and field");
13357 debug_tree (f2);
13358 return false;
13359 }
13360 }
13361 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13362 verify_variant_match (TYPE_ARG_TYPES);
13363 /* For C++ the qualified variant of an array type is really an array type
13364 of the qualified TREE_TYPE.
13365 ObjC builds variants of pointer types where the pointed-to type is a
13366 variant, too, in objc_get_protocol_qualified_type. */
13367 if (TREE_TYPE (t) != TREE_TYPE (tv)
13368 && ((TREE_CODE (t) != ARRAY_TYPE
13369 && !POINTER_TYPE_P (t))
13370 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13371 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13372 {
13373 error ("type variant has different TREE_TYPE");
13374 debug_tree (tv);
13375 error ("type variant's TREE_TYPE");
13376 debug_tree (TREE_TYPE (tv));
13377 error ("type's TREE_TYPE");
13378 debug_tree (TREE_TYPE (t));
13379 return false;
13380 }
13381 if (type_with_alias_set_p (t)
13382 && !gimple_canonical_types_compatible_p (t, tv, false))
13383 {
13384 error ("type is not compatible with its variant");
13385 debug_tree (tv);
13386 error ("type variant's TREE_TYPE");
13387 debug_tree (TREE_TYPE (tv));
13388 error ("type's TREE_TYPE");
13389 debug_tree (TREE_TYPE (t));
13390 return false;
13391 }
13392 return true;
13393 #undef verify_variant_match
13394 }
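/* Editorial example (sketch, not GCC code): the verify_variant_match macro
   above relies on the classic do { ... } while (false) wrapper so that it
   expands to a single statement and composes safely with if/else.  A
   stripped-down standalone variant of the same idiom, with invented names:  */
#if 0
#include <stdio.h>

struct toy_type { int precision; int is_unsigned; };

#define TOY_VERIFY_MATCH(field, a, b)                          \
  do {                                                         \
    if ((a)->field != (b)->field)                              \
      {                                                        \
        fprintf (stderr, "variant differs by " #field "\n");   \
        return 0;                                              \
      }                                                        \
  } while (0)

/* Return 1 if the two descriptors agree on every checked field.  */
static int
toy_verify_variant (const struct toy_type *t, const struct toy_type *tv)
{
  TOY_VERIFY_MATCH (precision, t, tv);
  TOY_VERIFY_MATCH (is_unsigned, t, tv);
  return 1;
}

int
main (void)
{
  struct toy_type a = { 32, 0 }, b = { 32, 1 };
  return toy_verify_variant (&a, &b) ? 0 : 1;
}
#endif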
13395
13396
13397 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13398 the middle-end types_compatible_p function. It needs to avoid
13399 claiming types are different for types that should be treated
13400 the same with respect to TBAA. Canonical types are also used
13401 for IL consistency checks via the useless_type_conversion_p
13402 predicate which does not handle all type kinds itself but falls
13403 back to pointer-comparison of TYPE_CANONICAL for aggregates
13404 for example. */
13405
13406 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13407 type calculation because we need to allow inter-operability between signed
13408 and unsigned variants. */
13409
13410 bool
13411 type_with_interoperable_signedness (const_tree type)
13412 {
13413 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13414 signed char and unsigned char. Similarly the Fortran FE builds
13415 C_SIZE_T as a signed type, while C defines it as unsigned. */
13416
13417 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13418 == INTEGER_TYPE
13419 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13420 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13421 }
13422
13423 /* Return true iff T1 and T2 are structurally identical as far as
13424 TBAA is concerned.
13425 This function is used both by lto.c canonical type merging and by the
13426 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13427 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13428 useful only for LTO because only in that case TYPE_CANONICAL equivalence
13429 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13430
13431 bool
13432 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13433 bool trust_type_canonical)
13434 {
13435 /* Type variants should be the same as the main variant. When not doing sanity
13436 checking to verify this fact, go to the main variants and save some work. */
13437 if (trust_type_canonical)
13438 {
13439 t1 = TYPE_MAIN_VARIANT (t1);
13440 t2 = TYPE_MAIN_VARIANT (t2);
13441 }
13442
13443 /* Check first for the obvious case of pointer identity. */
13444 if (t1 == t2)
13445 return true;
13446
13447 /* Check that we have two types to compare. */
13448 if (t1 == NULL_TREE || t2 == NULL_TREE)
13449 return false;
13450
13451 /* We consider complete types always compatible with incomplete types.
13452 This does not make sense for canonical type calculation and thus we
13453 need to ensure that we are never called on them.
13454
13455 FIXME: For more correctness the function probably should have three modes:
13456 1) a mode assuming that types are complete, matching their structure
13457 2) a mode allowing incomplete types but producing equivalence classes
13458 and thus ignoring all info from complete types
13459 3) a mode allowing incomplete types to match complete ones but checking
13460 compatibility between complete types.
13461
13462 1 and 2 can be used for canonical type calculation. 3 is the real
13463 definition of type compatibility that can be used e.g. for warnings during
13464 declaration merging. */
13465
13466 gcc_assert (!trust_type_canonical
13467 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13468 /* If the types have been previously registered and found equal
13469 they still are. */
13470
13471 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13472 && trust_type_canonical)
13473 {
13474 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13475 it is always NULL, but it is set to non-NULL for types
13476 constructed by build_pointer_type and variants. In this case
13477 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13478 all pointers are considered equal). Be sure not to return false
13479 negatives. */
13480 gcc_checking_assert (canonical_type_used_p (t1)
13481 && canonical_type_used_p (t2));
13482 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13483 }
13484
13485 /* Can't be the same type if the types don't have the same code. */
13486 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13487 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13488 return false;
13489
13490 /* Qualifiers do not matter for canonical type comparison purposes. */
13491
13492 /* Void types and nullptr types are always the same. */
13493 if (TREE_CODE (t1) == VOID_TYPE
13494 || TREE_CODE (t1) == NULLPTR_TYPE)
13495 return true;
13496
13497 /* Can't be the same type if they have different modes. */
13498 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13499 return false;
13500
13501 /* Non-aggregate types can be handled cheaply. */
13502 if (INTEGRAL_TYPE_P (t1)
13503 || SCALAR_FLOAT_TYPE_P (t1)
13504 || FIXED_POINT_TYPE_P (t1)
13505 || TREE_CODE (t1) == VECTOR_TYPE
13506 || TREE_CODE (t1) == COMPLEX_TYPE
13507 || TREE_CODE (t1) == OFFSET_TYPE
13508 || POINTER_TYPE_P (t1))
13509 {
13510 /* Can't be the same type if they have different precision. */
13511 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13512 return false;
13513
13514 /* In some cases the signed and unsigned types are required to be
13515 inter-operable. */
13516 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13517 && !type_with_interoperable_signedness (t1))
13518 return false;
13519
13520 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13521 interoperable with "signed char". Unless all frontends are revisited
13522 to agree on these types, we must ignore the flag completely. */
13523
13524 /* The Fortran standard defines the C_PTR type, which is compatible with
13525 every C pointer. For this reason we need to glob all pointers into one.
13526 Still, pointers in different address spaces are not compatible. */
13527 if (POINTER_TYPE_P (t1))
13528 {
13529 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13530 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13531 return false;
13532 }
13533
13534 /* Tail-recurse to components. */
13535 if (TREE_CODE (t1) == VECTOR_TYPE
13536 || TREE_CODE (t1) == COMPLEX_TYPE)
13537 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13538 TREE_TYPE (t2),
13539 trust_type_canonical);
13540
13541 return true;
13542 }
13543
13544 /* Do type-specific comparisons. */
13545 switch (TREE_CODE (t1))
13546 {
13547 case ARRAY_TYPE:
13548 /* Array types are the same if the element types are the same and
13549 the number of elements is the same. */
13550 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13551 trust_type_canonical)
13552 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13553 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13554 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13555 return false;
13556 else
13557 {
13558 tree i1 = TYPE_DOMAIN (t1);
13559 tree i2 = TYPE_DOMAIN (t2);
13560
13561 /* For an incomplete external array, the type domain can be
13562 NULL_TREE. Check this condition also. */
13563 if (i1 == NULL_TREE && i2 == NULL_TREE)
13564 return true;
13565 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13566 return false;
13567 else
13568 {
13569 tree min1 = TYPE_MIN_VALUE (i1);
13570 tree min2 = TYPE_MIN_VALUE (i2);
13571 tree max1 = TYPE_MAX_VALUE (i1);
13572 tree max2 = TYPE_MAX_VALUE (i2);
13573
13574 /* The minimum/maximum values have to be the same. */
13575 if ((min1 == min2
13576 || (min1 && min2
13577 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13578 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13579 || operand_equal_p (min1, min2, 0))))
13580 && (max1 == max2
13581 || (max1 && max2
13582 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13583 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13584 || operand_equal_p (max1, max2, 0)))))
13585 return true;
13586 else
13587 return false;
13588 }
13589 }
13590
13591 case METHOD_TYPE:
13592 case FUNCTION_TYPE:
13593 /* Function types are the same if the return type and argument types
13594 are the same. */
13595 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13596 trust_type_canonical))
13597 return false;
13598
13599 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13600 return true;
13601 else
13602 {
13603 tree parms1, parms2;
13604
13605 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13606 parms1 && parms2;
13607 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13608 {
13609 if (!gimple_canonical_types_compatible_p
13610 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13611 trust_type_canonical))
13612 return false;
13613 }
13614
13615 if (parms1 || parms2)
13616 return false;
13617
13618 return true;
13619 }
13620
13621 case RECORD_TYPE:
13622 case UNION_TYPE:
13623 case QUAL_UNION_TYPE:
13624 {
13625 tree f1, f2;
13626
13627 /* Don't try to compare variants of an incomplete type, before
13628 TYPE_FIELDS has been copied around. */
13629 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13630 return true;
13631
13632
13633 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13634 return false;
13635
13636 /* For aggregate types, all the fields must be the same. */
13637 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13638 f1 || f2;
13639 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13640 {
13641 /* Skip non-fields. */
13642 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13643 f1 = TREE_CHAIN (f1);
13644 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13645 f2 = TREE_CHAIN (f2);
13646 if (!f1 || !f2)
13647 break;
13648 /* The fields must have the same name, offset and type. */
13649 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13650 || !gimple_compare_field_offset (f1, f2)
13651 || !gimple_canonical_types_compatible_p
13652 (TREE_TYPE (f1), TREE_TYPE (f2),
13653 trust_type_canonical))
13654 return false;
13655 }
13656
13657 /* If one aggregate has more fields than the other, they
13658 are not the same. */
13659 if (f1 || f2)
13660 return false;
13661
13662 return true;
13663 }
13664
13665 default:
13666 /* Consider all types with language specific trees in them mutually
13667 compatible. This is executed only from verify_type and false
13668 positives can be tolerated. */
13669 gcc_assert (!in_lto_p);
13670 return true;
13671 }
13672 }
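/* Editorial example (a minimal sketch, not GCC code): the function above is a
   structural walk: it compares the kind and the cheap scalar properties first,
   then recurses into component types (array element, function return and
   argument types, record fields).  The toy comparison below mirrors that shape
   for a made-up two-kind type descriptor; all names are invented.  */
#if 0
#include <stdbool.h>

enum toy_kind { TOY_INT, TOY_ARRAY };

struct toy_type
{
  enum toy_kind kind;
  int precision;                 /* For TOY_INT.  */
  const struct toy_type *elem;   /* For TOY_ARRAY.  */
  long nelems;                   /* For TOY_ARRAY.  */
};

static bool
toy_types_compatible_p (const struct toy_type *t1, const struct toy_type *t2)
{
  if (t1 == t2)
    return true;                          /* Pointer identity is cheapest.  */
  if (t1->kind != t2->kind)
    return false;                         /* Different kinds never match.  */
  switch (t1->kind)
    {
    case TOY_INT:
      return t1->precision == t2->precision;
    case TOY_ARRAY:
      /* Compare the bound, then recurse into the element type.  */
      return t1->nelems == t2->nelems
             && toy_types_compatible_p (t1->elem, t2->elem);
    }
  return false;
}

int
main (void)
{
  struct toy_type int32 = { TOY_INT, 32, 0, 0 };
  struct toy_type arr_a = { TOY_ARRAY, 0, &int32, 10 };
  struct toy_type arr_b = { TOY_ARRAY, 0, &int32, 10 };
  return toy_types_compatible_p (&arr_a, &arr_b) ? 0 : 1;
}
#endif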
13673
13674 /* Verify type T. */
13675
13676 void
13677 verify_type (const_tree t)
13678 {
13679 bool error_found = false;
13680 tree mv = TYPE_MAIN_VARIANT (t);
13681 if (!mv)
13682 {
13683 error ("Main variant is not defined");
13684 error_found = true;
13685 }
13686 else if (mv != TYPE_MAIN_VARIANT (mv))
13687 {
13688 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13689 debug_tree (mv);
13690 error_found = true;
13691 }
13692 else if (t != mv && !verify_type_variant (t, mv))
13693 error_found = true;
13694
13695 tree ct = TYPE_CANONICAL (t);
13696 if (!ct)
13697 ;
13698 else if (TYPE_CANONICAL (t) != ct)
13699 {
13700 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13701 debug_tree (ct);
13702 error_found = true;
13703 }
13704 /* Method and function types cannot be used to address memory and thus
13705 TYPE_CANONICAL really matters only for determining useless conversions.
13706
13707 FIXME: The C++ FE produces declarations of builtin functions that are not
13708 compatible with main variants. */
13709 else if (TREE_CODE (t) == FUNCTION_TYPE)
13710 ;
13711 else if (t != ct
13712 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13713 with variably sized arrays because their sizes are possibly
13714 gimplified to different variables. */
13715 && !variably_modified_type_p (ct, NULL)
13716 && !gimple_canonical_types_compatible_p (t, ct, false))
13717 {
13718 error ("TYPE_CANONICAL is not compatible");
13719 debug_tree (ct);
13720 error_found = true;
13721 }
13722
13723 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13724 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13725 {
13726 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13727 debug_tree (ct);
13728 error_found = true;
13729 }
13730 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13731 FUNCTION_*_QUALIFIED flags are set. */
13732 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13733 {
13734 error ("TYPE_CANONICAL of main variant is not main variant");
13735 debug_tree (ct);
13736 debug_tree (TYPE_MAIN_VARIANT (ct));
13737 error_found = true;
13738 }
13739
13740
13741 /* Check various uses of TYPE_MINVAL. */
13742 if (RECORD_OR_UNION_TYPE_P (t))
13743 {
13744 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13745 and dangles the pointer from time to time. */
13746 if (TYPE_VFIELD (t)
13747 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13748 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13749 {
13750 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13751 debug_tree (TYPE_VFIELD (t));
13752 error_found = true;
13753 }
13754 }
13755 else if (TREE_CODE (t) == POINTER_TYPE)
13756 {
13757 if (TYPE_NEXT_PTR_TO (t)
13758 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13759 {
13760 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13761 debug_tree (TYPE_NEXT_PTR_TO (t));
13762 error_found = true;
13763 }
13764 }
13765 else if (TREE_CODE (t) == REFERENCE_TYPE)
13766 {
13767 if (TYPE_NEXT_REF_TO (t)
13768 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13769 {
13770 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13771 debug_tree (TYPE_NEXT_REF_TO (t));
13772 error_found = true;
13773 }
13774 }
13775 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13776 || TREE_CODE (t) == FIXED_POINT_TYPE)
13777 {
13778 /* FIXME: The following check should pass:
13779 useless_type_conversion_p (const_cast <tree> (t),
13780 TREE_TYPE (TYPE_MIN_VALUE (t)))
13781 but does not for C sizetypes in LTO. */
13782 }
13783 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13784 else if (TYPE_MINVAL (t)
13785 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13786 || in_lto_p))
13787 {
13788 error ("TYPE_MINVAL non-NULL");
13789 debug_tree (TYPE_MINVAL (t));
13790 error_found = true;
13791 }
13792
13793 /* Check various uses of TYPE_MAXVAL. */
13794 if (RECORD_OR_UNION_TYPE_P (t))
13795 {
13796 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13797 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13798 && TYPE_METHODS (t) != error_mark_node)
13799 {
13800 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13801 debug_tree (TYPE_METHODS (t));
13802 error_found = true;
13803 }
13804 }
13805 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13806 {
13807 if (TYPE_METHOD_BASETYPE (t)
13808 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13809 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13810 {
13811 error ("TYPE_METHOD_BASETYPE is not record nor union");
13812 debug_tree (TYPE_METHOD_BASETYPE (t));
13813 error_found = true;
13814 }
13815 }
13816 else if (TREE_CODE (t) == OFFSET_TYPE)
13817 {
13818 if (TYPE_OFFSET_BASETYPE (t)
13819 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13820 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13821 {
13822 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13823 debug_tree (TYPE_OFFSET_BASETYPE (t));
13824 error_found = true;
13825 }
13826 }
13827 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13828 || TREE_CODE (t) == FIXED_POINT_TYPE)
13829 {
13830 /* FIXME: The following check should pass:
13831 useless_type_conversion_p (const_cast <tree> (t),
13832 TREE_TYPE (TYPE_MAX_VALUE (t)))
13833 but does not for C sizetypes in LTO. */
13834 }
13835 else if (TREE_CODE (t) == ARRAY_TYPE)
13836 {
13837 if (TYPE_ARRAY_MAX_SIZE (t)
13838 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13839 {
13840 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13841 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13842 error_found = true;
13843 }
13844 }
13845 else if (TYPE_MAXVAL (t))
13846 {
13847 error ("TYPE_MAXVAL non-NULL");
13848 debug_tree (TYPE_MAXVAL (t));
13849 error_found = true;
13850 }
13851
13852 /* Check various uses of TYPE_BINFO. */
13853 if (RECORD_OR_UNION_TYPE_P (t))
13854 {
13855 if (!TYPE_BINFO (t))
13856 ;
13857 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13858 {
13859 error ("TYPE_BINFO is not TREE_BINFO");
13860 debug_tree (TYPE_BINFO (t));
13861 error_found = true;
13862 }
13863 /* FIXME: Java builds invalid empty binfos that do not have
13864 TREE_TYPE set. */
13865 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13866 {
13867 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13868 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13869 error_found = true;
13870 }
13871 }
13872 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13873 {
13874 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13875 debug_tree (TYPE_LANG_SLOT_1 (t));
13876 error_found = true;
13877 }
13878
13879 /* Check various uses of TYPE_VALUES_RAW. */
13880 if (TREE_CODE (t) == ENUMERAL_TYPE)
13881 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13882 {
13883 tree value = TREE_VALUE (l);
13884 tree name = TREE_PURPOSE (l);
13885
13886 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13887 a CONST_DECL of ENUMERAL_TYPE. */
13888 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13889 {
13890 error ("Enum value is not CONST_DECL or INTEGER_CST");
13891 debug_tree (value);
13892 debug_tree (name);
13893 error_found = true;
13894 }
13895 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13896 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13897 {
13898 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13899 debug_tree (value);
13900 debug_tree (name);
13901 error_found = true;
13902 }
13903 if (TREE_CODE (name) != IDENTIFIER_NODE)
13904 {
13905 error ("Enum value name is not IDENTIFIER_NODE");
13906 debug_tree (value);
13907 debug_tree (name);
13908 error_found = true;
13909 }
13910 }
13911 else if (TREE_CODE (t) == ARRAY_TYPE)
13912 {
13913 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13914 {
13915 error ("Array TYPE_DOMAIN is not integer type");
13916 debug_tree (TYPE_DOMAIN (t));
13917 error_found = true;
13918 }
13919 }
13920 else if (RECORD_OR_UNION_TYPE_P (t))
13921 {
13922 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13923 {
13924 error ("TYPE_FIELDS defined in incomplete type");
13925 error_found = true;
13926 }
13927 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13928 {
13929 /* TODO: verify properties of decls. */
13930 if (TREE_CODE (fld) == FIELD_DECL)
13931 ;
13932 else if (TREE_CODE (fld) == TYPE_DECL)
13933 ;
13934 else if (TREE_CODE (fld) == CONST_DECL)
13935 ;
13936 else if (TREE_CODE (fld) == VAR_DECL)
13937 ;
13938 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13939 ;
13940 else if (TREE_CODE (fld) == USING_DECL)
13941 ;
13942 else
13943 {
13944 error ("Wrong tree in TYPE_FIELDS list");
13945 debug_tree (fld);
13946 error_found = true;
13947 }
13948 }
13949 }
13950 else if (TREE_CODE (t) == INTEGER_TYPE
13951 || TREE_CODE (t) == BOOLEAN_TYPE
13952 || TREE_CODE (t) == OFFSET_TYPE
13953 || TREE_CODE (t) == REFERENCE_TYPE
13954 || TREE_CODE (t) == NULLPTR_TYPE
13955 || TREE_CODE (t) == POINTER_TYPE)
13956 {
13957 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13958 {
13959 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13960 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13961 error_found = true;
13962 }
13963 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13964 {
13965 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13966 debug_tree (TYPE_CACHED_VALUES (t));
13967 error_found = true;
13968 }
13969 /* Verify just enough of the cache to ensure that no one copied it to a new
13970 type. All copying should go through copy_node, which should clear it. */
13971 else if (TYPE_CACHED_VALUES_P (t))
13972 {
13973 int i;
13974 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13975 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13976 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13977 {
13978 error ("wrong TYPE_CACHED_VALUES entry");
13979 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13980 error_found = true;
13981 break;
13982 }
13983 }
13984 }
13985 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13986 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13987 {
13988 /* C++ FE uses TREE_PURPOSE to store initial values. */
13989 if (TREE_PURPOSE (l) && in_lto_p)
13990 {
13991 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13992 debug_tree (l);
13993 error_found = true;
13994 }
13995 if (!TYPE_P (TREE_VALUE (l)))
13996 {
13997 error ("Wrong entry in TYPE_ARG_TYPES list");
13998 debug_tree (l);
13999 error_found = true;
14000 }
14001 }
14002 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14003 {
14004 error ("TYPE_VALUES_RAW field is non-NULL");
14005 debug_tree (TYPE_VALUES_RAW (t));
14006 error_found = true;
14007 }
14008 if (TREE_CODE (t) != INTEGER_TYPE
14009 && TREE_CODE (t) != BOOLEAN_TYPE
14010 && TREE_CODE (t) != OFFSET_TYPE
14011 && TREE_CODE (t) != REFERENCE_TYPE
14012 && TREE_CODE (t) != NULLPTR_TYPE
14013 && TREE_CODE (t) != POINTER_TYPE
14014 && TYPE_CACHED_VALUES_P (t))
14015 {
14016 error ("TYPE_CACHED_VALUES_P is set while it should not");
14017 error_found = true;
14018 }
14019 if (TYPE_STRING_FLAG (t)
14020 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
14021 {
14022 error ("TYPE_STRING_FLAG is set on wrong type code");
14023 error_found = true;
14024 }
14025 else if (TYPE_STRING_FLAG (t))
14026 {
14027 const_tree b = t;
14028 if (TREE_CODE (b) == ARRAY_TYPE)
14029 b = TREE_TYPE (t);
14030 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
14031 which is 32 bits. */
14032 if (TREE_CODE (b) != INTEGER_TYPE)
14033 {
14034 error ("TYPE_STRING_FLAG is set on type that does not look like "
14035 "char nor array of chars");
14036 error_found = true;
14037 }
14038 }
14039
14040 /* ipa-devirt makes the assumption that TYPE_METHOD_BASETYPE is always
14041 a TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14042 of a type. */
14043 if (TREE_CODE (t) == METHOD_TYPE
14044 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14045 {
14046 error ("TYPE_METHOD_BASETYPE is not main variant");
14047 error_found = true;
14048 }
14049
14050 if (error_found)
14051 {
14052 debug_tree (const_cast <tree> (t));
14053 internal_error ("verify_type failed");
14054 }
14055 }
14056
14057
14058 /* Return true if ARG is marked with the nonnull attribute in the
14059 current function signature. */
14060
14061 bool
14062 nonnull_arg_p (const_tree arg)
14063 {
14064 tree t, attrs, fntype;
14065 unsigned HOST_WIDE_INT arg_num;
14066
14067 gcc_assert (TREE_CODE (arg) == PARM_DECL
14068 && (POINTER_TYPE_P (TREE_TYPE (arg))
14069 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14070
14071 /* The static chain decl is always non-null. */
14072 if (arg == cfun->static_chain_decl)
14073 return true;
14074
14075 /* The THIS argument of a method is always non-NULL. */
14076 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14077 && arg == DECL_ARGUMENTS (cfun->decl)
14078 && flag_delete_null_pointer_checks)
14079 return true;
14080
14081 /* Values passed by reference are always non-NULL. */
14082 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14083 && flag_delete_null_pointer_checks)
14084 return true;
14085
14086 fntype = TREE_TYPE (cfun->decl);
14087 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14088 {
14089 attrs = lookup_attribute ("nonnull", attrs);
14090
14091 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14092 if (attrs == NULL_TREE)
14093 return false;
14094
14095 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14096 if (TREE_VALUE (attrs) == NULL_TREE)
14097 return true;
14098
14099 /* Get the position number for ARG in the function signature. */
14100 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14101 t;
14102 t = DECL_CHAIN (t), arg_num++)
14103 {
14104 if (t == arg)
14105 break;
14106 }
14107
14108 gcc_assert (t == arg);
14109
14110 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14111 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14112 {
14113 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14114 return true;
14115 }
14116 }
14117
14118 return false;
14119 }
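/* Editorial example (sketch, not GCC code): the attribute consulted above is
   the standard GCC "nonnull" function attribute.  With an argument list it
   names 1-based parameter positions; with no list it covers all pointer
   parameters.  The declarations below are invented for illustration.  */
#if 0
/* Parameters 1 and 3 are promised to be non-null.  */
extern void copy_buf (void *dst, unsigned long n, const void *src)
  __attribute__ ((nonnull (1, 3)));

/* All pointer parameters are promised to be non-null.  */
extern unsigned long my_strlen (const char *s) __attribute__ ((nonnull));
#endif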
14120
14121 /* Given location LOC, strip away any packed range information
14122 or ad-hoc information. */
14123
14124 location_t
14125 get_pure_location (location_t loc)
14126 {
14127 if (IS_ADHOC_LOC (loc))
14128 loc
14129 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
14130
14131 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
14132 return loc;
14133
14134 if (loc < RESERVED_LOCATION_COUNT)
14135 return loc;
14136
14137 const line_map *map = linemap_lookup (line_table, loc);
14138 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
14139
14140 return loc & ~((1 << ordmap->m_range_bits) - 1);
14141 }
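/* Editorial example (sketch, not GCC code): for ordinary locations the low
   range bits of the location value encode packed range information, so a
   "pure" location is obtained by masking them off, as done above.  The
   arithmetic below uses a made-up 5-bit range field for illustration.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int range_bits = 5;            /* example value of m_range_bits */
  unsigned int loc = 0x12345;             /* packed location + range info  */
  unsigned int pure = loc & ~((1u << range_bits) - 1);
  printf ("pure location: %#x\n", pure);  /* low 5 bits cleared: 0x12340   */
  return 0;
}
#endif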
14142
14143 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14144 information. */
14145
14146 location_t
14147 set_block (location_t loc, tree block)
14148 {
14149 location_t pure_loc = get_pure_location (loc);
14150 source_range src_range = get_range_from_loc (line_table, loc);
14151 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14152 }
14153
14154 location_t
14155 set_source_range (tree expr, location_t start, location_t finish)
14156 {
14157 source_range src_range;
14158 src_range.m_start = start;
14159 src_range.m_finish = finish;
14160 return set_source_range (expr, src_range);
14161 }
14162
14163 location_t
14164 set_source_range (tree expr, source_range src_range)
14165 {
14166 if (!EXPR_P (expr))
14167 return UNKNOWN_LOCATION;
14168
14169 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14170 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14171 pure_loc,
14172 src_range,
14173 NULL);
14174 SET_EXPR_LOCATION (expr, adhoc);
14175 return adhoc;
14176 }
14177
14178 location_t
14179 make_location (location_t caret, location_t start, location_t finish)
14180 {
14181 location_t pure_loc = get_pure_location (caret);
14182 source_range src_range;
14183 src_range.m_start = start;
14184 src_range.m_finish = finish;
14185 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
14186 pure_loc,
14187 src_range,
14188 NULL);
14189 return combined_loc;
14190 }
14191
14192 /* Return the name of combined function FN, for debugging purposes. */
14193
14194 const char *
14195 combined_fn_name (combined_fn fn)
14196 {
14197 if (builtin_fn_p (fn))
14198 {
14199 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14200 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14201 }
14202 else
14203 return internal_fn_name (as_internal_fn (fn));
14204 }
14205
14206 #if CHECKING_P
14207
14208 namespace selftest {
14209
14210 /* Selftests for tree. */
14211
14212 /* Verify that integer constants are sane. */
14213
14214 static void
14215 test_integer_constants ()
14216 {
14217 ASSERT_TRUE (integer_type_node != NULL);
14218 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14219
14220 tree type = integer_type_node;
14221
14222 tree zero = build_zero_cst (type);
14223 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14224 ASSERT_EQ (type, TREE_TYPE (zero));
14225
14226 tree one = build_int_cst (type, 1);
14227 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14228 ASSERT_EQ (type, TREE_TYPE (one));
14229 }
14230
14231 /* Verify identifiers. */
14232
14233 static void
14234 test_identifiers ()
14235 {
14236 tree identifier = get_identifier ("foo");
14237 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14238 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14239 }
14240
14241 /* Verify LABEL_DECL. */
14242
14243 static void
14244 test_labels ()
14245 {
14246 tree identifier = get_identifier ("err");
14247 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14248 identifier, void_type_node);
14249 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14250 ASSERT_FALSE (FORCED_LABEL (label_decl));
14251 }
14252
14253 /* Run all of the selftests within this file. */
14254
14255 void
14256 tree_c_tests ()
14257 {
14258 test_integer_constants ();
14259 test_identifiers ();
14260 test_labels ();
14261 }
14262
14263 } // namespace selftest
14264
14265 #endif /* CHECKING_P */
14266
14267 #include "gt-tree.h"