1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24    tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
 28    call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65
66 /* Tree code classes. */
67
68 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
69 #define END_OF_BASE_TREE_CODES tcc_exceptional,
70
71 const enum tree_code_class tree_code_type[] = {
72 #include "all-tree.def"
73 };
74
75 #undef DEFTREECODE
76 #undef END_OF_BASE_TREE_CODES
77
78 /* Table indexed by tree code giving number of expression
79 operands beyond the fixed part of the node structure.
80 Not used for types or decls. */
81
82 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
83 #define END_OF_BASE_TREE_CODES 0,
84
85 const unsigned char tree_code_length[] = {
86 #include "all-tree.def"
87 };
88
89 #undef DEFTREECODE
90 #undef END_OF_BASE_TREE_CODES
91
92 /* Names of tree components.
93 Used for printing out the tree and error messages. */
94 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
95 #define END_OF_BASE_TREE_CODES "@dummy",
96
97 static const char *const tree_code_name[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Each tree code class has an associated string representation.
105 These must correspond to the tree_code_class entries. */
106
107 const char *const tree_code_class_strings[] =
108 {
109 "exceptional",
110 "constant",
111 "type",
112 "declaration",
113 "reference",
114 "comparison",
115 "unary",
116 "binary",
117 "statement",
118 "vl_exp",
119 "expression"
120 };
121
122 /* obstack.[ch] explicitly declined to prototype this. */
123 extern int _obstack_allocated_p (struct obstack *h, void *obj);
124
125 /* Statistics-gathering stuff. */
126
127 static int tree_code_counts[MAX_TREE_CODES];
128 int tree_node_counts[(int) all_kinds];
129 int tree_node_sizes[(int) all_kinds];
130
131 /* Keep in sync with tree.h:enum tree_node_kind. */
132 static const char * const tree_node_kind_names[] = {
133 "decls",
134 "types",
135 "blocks",
136 "stmts",
137 "refs",
138 "exprs",
139 "constants",
140 "identifiers",
141 "vecs",
142 "binfos",
143 "ssa names",
144 "constructors",
145 "random kinds",
146 "lang_decl kinds",
147 "lang_type kinds",
148 "omp clauses",
149 };
150
151 /* Unique id for next decl created. */
152 static GTY(()) int next_decl_uid;
153 /* Unique id for next type created. */
154 static GTY(()) int next_type_uid = 1;
155 /* Unique id for next debug decl created. Use negative numbers,
156 to catch erroneous uses. */
157 static GTY(()) int next_debug_decl_uid;
158
159 /* Since we cannot rehash a type after it is in the table, we have to
160 keep the hash code. */
161
162 struct GTY((for_user)) type_hash {
163 unsigned long hash;
164 tree type;
165 };
166
167 /* Initial size of the hash table (rounded to next prime). */
168 #define TYPE_HASH_INITIAL_SIZE 1000
169
170 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
171 {
172 static hashval_t hash (type_hash *t) { return t->hash; }
173 static bool equal (type_hash *a, type_hash *b);
174
175 static int
176 keep_cache_entry (type_hash *&t)
177 {
178 return ggc_marked_p (t->type);
179 }
180 };
181
182 /* Now here is the hash table. When recording a type, it is added to
183 the slot whose index is the hash code. Note that the hash table is
184 used for several kinds of types (function types, array types and
185 array index range types, for now). While all these live in the
186 same table, they are completely independent, and the hash code is
187 computed differently for each of these. */
188
189 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
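
/* Illustrative sketch of how the table above is typically used (the exact
   hashing steps differ per type kind; this only shows the general shape,
   using type_hash_canon and type_hash_list defined later in this file):

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
     type_hash_list (TYPE_ARG_TYPES (t), hstate);
     t = type_hash_canon (hstate.end (), t);

   i.e. a builder such as build_function_type fills in a candidate node T,
   hashes its distinguishing fields, and lets type_hash_canon either return
   an existing structurally identical type or enter T into the table.  */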
190
191 /* Hash table and temporary node for larger integer const values. */
192 static GTY (()) tree int_cst_node;
193
194 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
195 {
196 static hashval_t hash (tree t);
197 static bool equal (tree x, tree y);
198 };
199
200 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
201
202 /* Hash table for optimization flags and target option flags. Use the same
203 hash table for both sets of options. Nodes for building the current
204 optimization and target option nodes. The assumption is most of the time
205 the options created will already be in the hash table, so we avoid
206    allocating and freeing up a node repeatedly.  */
207 static GTY (()) tree cl_optimization_node;
208 static GTY (()) tree cl_target_option_node;
209
210 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
211 {
212 static hashval_t hash (tree t);
213 static bool equal (tree x, tree y);
214 };
215
216 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
217
218 /* General tree->tree mapping structure for use in hash tables. */
219
220
221 static GTY ((cache))
222 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
223
224 static GTY ((cache))
225 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
226
227 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
228 {
229 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
230
231 static bool
232 equal (tree_vec_map *a, tree_vec_map *b)
233 {
234 return a->base.from == b->base.from;
235 }
236
237 static int
238 keep_cache_entry (tree_vec_map *&m)
239 {
240 return ggc_marked_p (m->base.from);
241 }
242 };
243
244 static GTY ((cache))
245 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
246
247 static void set_type_quals (tree, int);
248 static void print_type_hash_statistics (void);
249 static void print_debug_expr_statistics (void);
250 static void print_value_expr_statistics (void);
251 static void type_hash_list (const_tree, inchash::hash &);
252 static void attribute_hash_list (const_tree, inchash::hash &);
253
254 tree global_trees[TI_MAX];
255 tree integer_types[itk_none];
256
257 bool int_n_enabled_p[NUM_INT_N_ENTS];
258 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
259
260 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
261
262 /* Number of operands for each OpenMP clause. */
263 unsigned const char omp_clause_num_ops[] =
264 {
265 0, /* OMP_CLAUSE_ERROR */
266 1, /* OMP_CLAUSE_PRIVATE */
267 1, /* OMP_CLAUSE_SHARED */
268 1, /* OMP_CLAUSE_FIRSTPRIVATE */
269 2, /* OMP_CLAUSE_LASTPRIVATE */
270 5, /* OMP_CLAUSE_REDUCTION */
271 1, /* OMP_CLAUSE_COPYIN */
272 1, /* OMP_CLAUSE_COPYPRIVATE */
273 3, /* OMP_CLAUSE_LINEAR */
274 2, /* OMP_CLAUSE_ALIGNED */
275 1, /* OMP_CLAUSE_DEPEND */
276 1, /* OMP_CLAUSE_UNIFORM */
277 1, /* OMP_CLAUSE_TO_DECLARE */
278 1, /* OMP_CLAUSE_LINK */
279 2, /* OMP_CLAUSE_FROM */
280 2, /* OMP_CLAUSE_TO */
281 2, /* OMP_CLAUSE_MAP */
282 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
283 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
284 2, /* OMP_CLAUSE__CACHE_ */
285 2, /* OMP_CLAUSE_GANG */
286 1, /* OMP_CLAUSE_ASYNC */
287 1, /* OMP_CLAUSE_WAIT */
288 0, /* OMP_CLAUSE_AUTO */
289 0, /* OMP_CLAUSE_SEQ */
290 1, /* OMP_CLAUSE__LOOPTEMP_ */
291 1, /* OMP_CLAUSE_IF */
292 1, /* OMP_CLAUSE_NUM_THREADS */
293 1, /* OMP_CLAUSE_SCHEDULE */
294 0, /* OMP_CLAUSE_NOWAIT */
295 1, /* OMP_CLAUSE_ORDERED */
296 0, /* OMP_CLAUSE_DEFAULT */
297 3, /* OMP_CLAUSE_COLLAPSE */
298 0, /* OMP_CLAUSE_UNTIED */
299 1, /* OMP_CLAUSE_FINAL */
300 0, /* OMP_CLAUSE_MERGEABLE */
301 1, /* OMP_CLAUSE_DEVICE */
302 1, /* OMP_CLAUSE_DIST_SCHEDULE */
303 0, /* OMP_CLAUSE_INBRANCH */
304 0, /* OMP_CLAUSE_NOTINBRANCH */
305 1, /* OMP_CLAUSE_NUM_TEAMS */
306 1, /* OMP_CLAUSE_THREAD_LIMIT */
307 0, /* OMP_CLAUSE_PROC_BIND */
308 1, /* OMP_CLAUSE_SAFELEN */
309 1, /* OMP_CLAUSE_SIMDLEN */
310 0, /* OMP_CLAUSE_FOR */
311 0, /* OMP_CLAUSE_PARALLEL */
312 0, /* OMP_CLAUSE_SECTIONS */
313 0, /* OMP_CLAUSE_TASKGROUP */
314 1, /* OMP_CLAUSE_PRIORITY */
315 1, /* OMP_CLAUSE_GRAINSIZE */
316 1, /* OMP_CLAUSE_NUM_TASKS */
317 0, /* OMP_CLAUSE_NOGROUP */
318 0, /* OMP_CLAUSE_THREADS */
319 0, /* OMP_CLAUSE_SIMD */
320 1, /* OMP_CLAUSE_HINT */
321   0, /* OMP_CLAUSE_DEFAULTMAP  */
322 1, /* OMP_CLAUSE__SIMDUID_ */
323 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
324 0, /* OMP_CLAUSE_INDEPENDENT */
325 1, /* OMP_CLAUSE_WORKER */
326 1, /* OMP_CLAUSE_VECTOR */
327 1, /* OMP_CLAUSE_NUM_GANGS */
328 1, /* OMP_CLAUSE_NUM_WORKERS */
329 1, /* OMP_CLAUSE_VECTOR_LENGTH */
330 1, /* OMP_CLAUSE_TILE */
331 2, /* OMP_CLAUSE__GRIDDIM_ */
332 };
333
334 const char * const omp_clause_code_name[] =
335 {
336 "error_clause",
337 "private",
338 "shared",
339 "firstprivate",
340 "lastprivate",
341 "reduction",
342 "copyin",
343 "copyprivate",
344 "linear",
345 "aligned",
346 "depend",
347 "uniform",
348 "to",
349 "link",
350 "from",
351 "to",
352 "map",
353 "use_device_ptr",
354 "is_device_ptr",
355 "_cache_",
356 "gang",
357 "async",
358 "wait",
359 "auto",
360 "seq",
361 "_looptemp_",
362 "if",
363 "num_threads",
364 "schedule",
365 "nowait",
366 "ordered",
367 "default",
368 "collapse",
369 "untied",
370 "final",
371 "mergeable",
372 "device",
373 "dist_schedule",
374 "inbranch",
375 "notinbranch",
376 "num_teams",
377 "thread_limit",
378 "proc_bind",
379 "safelen",
380 "simdlen",
381 "for",
382 "parallel",
383 "sections",
384 "taskgroup",
385 "priority",
386 "grainsize",
387 "num_tasks",
388 "nogroup",
389 "threads",
390 "simd",
391 "hint",
392 "defaultmap",
393 "_simduid_",
394 "_Cilk_for_count_",
395 "independent",
396 "worker",
397 "vector",
398 "num_gangs",
399 "num_workers",
400 "vector_length",
401 "tile",
402 "_griddim_"
403 };
404
405
406 /* Return the tree node structure used by tree code CODE. */
407
408 static inline enum tree_node_structure_enum
409 tree_node_structure_for_code (enum tree_code code)
410 {
411 switch (TREE_CODE_CLASS (code))
412 {
413 case tcc_declaration:
414 {
415 switch (code)
416 {
417 case FIELD_DECL:
418 return TS_FIELD_DECL;
419 case PARM_DECL:
420 return TS_PARM_DECL;
421 case VAR_DECL:
422 return TS_VAR_DECL;
423 case LABEL_DECL:
424 return TS_LABEL_DECL;
425 case RESULT_DECL:
426 return TS_RESULT_DECL;
427 case DEBUG_EXPR_DECL:
428 return TS_DECL_WRTL;
429 case CONST_DECL:
430 return TS_CONST_DECL;
431 case TYPE_DECL:
432 return TS_TYPE_DECL;
433 case FUNCTION_DECL:
434 return TS_FUNCTION_DECL;
435 case TRANSLATION_UNIT_DECL:
436 return TS_TRANSLATION_UNIT_DECL;
437 default:
438 return TS_DECL_NON_COMMON;
439 }
440 }
441 case tcc_type:
442 return TS_TYPE_NON_COMMON;
443 case tcc_reference:
444 case tcc_comparison:
445 case tcc_unary:
446 case tcc_binary:
447 case tcc_expression:
448 case tcc_statement:
449 case tcc_vl_exp:
450 return TS_EXP;
451 default: /* tcc_constant and tcc_exceptional */
452 break;
453 }
454 switch (code)
455 {
456 /* tcc_constant cases. */
457 case VOID_CST: return TS_TYPED;
458 case INTEGER_CST: return TS_INT_CST;
459 case REAL_CST: return TS_REAL_CST;
460 case FIXED_CST: return TS_FIXED_CST;
461 case COMPLEX_CST: return TS_COMPLEX;
462 case VECTOR_CST: return TS_VECTOR;
463 case STRING_CST: return TS_STRING;
464 /* tcc_exceptional cases. */
465 case ERROR_MARK: return TS_COMMON;
466 case IDENTIFIER_NODE: return TS_IDENTIFIER;
467 case TREE_LIST: return TS_LIST;
468 case TREE_VEC: return TS_VEC;
469 case SSA_NAME: return TS_SSA_NAME;
470 case PLACEHOLDER_EXPR: return TS_COMMON;
471 case STATEMENT_LIST: return TS_STATEMENT_LIST;
472 case BLOCK: return TS_BLOCK;
473 case CONSTRUCTOR: return TS_CONSTRUCTOR;
474 case TREE_BINFO: return TS_BINFO;
475 case OMP_CLAUSE: return TS_OMP_CLAUSE;
476 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
477 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
478
479 default:
480 gcc_unreachable ();
481 }
482 }
483
484
485 /* Initialize tree_contains_struct to describe the hierarchy of tree
486 nodes. */
487
488 static void
489 initialize_tree_contains_struct (void)
490 {
491 unsigned i;
492
493 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
494 {
495 enum tree_code code;
496 enum tree_node_structure_enum ts_code;
497
498 code = (enum tree_code) i;
499 ts_code = tree_node_structure_for_code (code);
500
501 /* Mark the TS structure itself. */
502 tree_contains_struct[code][ts_code] = 1;
503
504 /* Mark all the structures that TS is derived from. */
505 switch (ts_code)
506 {
507 case TS_TYPED:
508 case TS_BLOCK:
509 MARK_TS_BASE (code);
510 break;
511
512 case TS_COMMON:
513 case TS_INT_CST:
514 case TS_REAL_CST:
515 case TS_FIXED_CST:
516 case TS_VECTOR:
517 case TS_STRING:
518 case TS_COMPLEX:
519 case TS_SSA_NAME:
520 case TS_CONSTRUCTOR:
521 case TS_EXP:
522 case TS_STATEMENT_LIST:
523 MARK_TS_TYPED (code);
524 break;
525
526 case TS_IDENTIFIER:
527 case TS_DECL_MINIMAL:
528 case TS_TYPE_COMMON:
529 case TS_LIST:
530 case TS_VEC:
531 case TS_BINFO:
532 case TS_OMP_CLAUSE:
533 case TS_OPTIMIZATION:
534 case TS_TARGET_OPTION:
535 MARK_TS_COMMON (code);
536 break;
537
538 case TS_TYPE_WITH_LANG_SPECIFIC:
539 MARK_TS_TYPE_COMMON (code);
540 break;
541
542 case TS_TYPE_NON_COMMON:
543 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
544 break;
545
546 case TS_DECL_COMMON:
547 MARK_TS_DECL_MINIMAL (code);
548 break;
549
550 case TS_DECL_WRTL:
551 case TS_CONST_DECL:
552 MARK_TS_DECL_COMMON (code);
553 break;
554
555 case TS_DECL_NON_COMMON:
556 MARK_TS_DECL_WITH_VIS (code);
557 break;
558
559 case TS_DECL_WITH_VIS:
560 case TS_PARM_DECL:
561 case TS_LABEL_DECL:
562 case TS_RESULT_DECL:
563 MARK_TS_DECL_WRTL (code);
564 break;
565
566 case TS_FIELD_DECL:
567 MARK_TS_DECL_COMMON (code);
568 break;
569
570 case TS_VAR_DECL:
571 MARK_TS_DECL_WITH_VIS (code);
572 break;
573
574 case TS_TYPE_DECL:
575 case TS_FUNCTION_DECL:
576 MARK_TS_DECL_NON_COMMON (code);
577 break;
578
579 case TS_TRANSLATION_UNIT_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 default:
584 gcc_unreachable ();
585 }
586 }
587
588 /* Basic consistency checks for attributes used in fold. */
589 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
590 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
592 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
601 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
618 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
622 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
624 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
625 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
629 }
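
/* For illustration, after the initialization above the containment table
   answers queries such as

     CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS)    -> true
     CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_WITH_VIS)  -> false
     CODE_CONTAINS_STRUCT (PLUS_EXPR, TS_TYPED)           -> true

   which is what the checking accessors in tree.h rely on.  */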
630
631
632 /* Init tree.c. */
633
634 void
635 init_ttree (void)
636 {
637 /* Initialize the hash table of types. */
638 type_hash_table
639 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
640
641 debug_expr_for_decl
642 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
643
644 value_expr_for_decl
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
646
647 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
648
649 int_cst_node = make_int_cst (1, 1);
650
651 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
652
653 cl_optimization_node = make_node (OPTIMIZATION_NODE);
654 cl_target_option_node = make_node (TARGET_OPTION_NODE);
655
656 /* Initialize the tree_contains_struct array. */
657 initialize_tree_contains_struct ();
658 lang_hooks.init_ts ();
659 }
660
661 \f
662 /* The name of the object as the assembler will see it (but before any
663 translations made by ASM_OUTPUT_LABELREF). Often this is the same
664 as DECL_NAME. It is an IDENTIFIER_NODE. */
665 tree
666 decl_assembler_name (tree decl)
667 {
668 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
669 lang_hooks.set_decl_assembler_name (decl);
670 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
671 }
672
673 /* When the target supports COMDAT groups, this indicates which group the
674 DECL is associated with. This can be either an IDENTIFIER_NODE or a
675 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
676 tree
677 decl_comdat_group (const_tree node)
678 {
679 struct symtab_node *snode = symtab_node::get (node);
680 if (!snode)
681 return NULL;
682 return snode->get_comdat_group ();
683 }
684
685 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
686 tree
687 decl_comdat_group_id (const_tree node)
688 {
689 struct symtab_node *snode = symtab_node::get (node);
690 if (!snode)
691 return NULL;
692 return snode->get_comdat_group_id ();
693 }
694
695 /* When the target supports named sections, return the section name of
696    NODE as a string, or NULL if it is in no section.  */
697 const char *
698 decl_section_name (const_tree node)
699 {
700 struct symtab_node *snode = symtab_node::get (node);
701 if (!snode)
702 return NULL;
703 return snode->get_section ();
704 }
705
706 /* Set the section name of NODE to the string VALUE, or clear it if
707    VALUE is NULL.  */
708 void
709 set_decl_section_name (tree node, const char *value)
710 {
711 struct symtab_node *snode;
712
713 if (value == NULL)
714 {
715 snode = symtab_node::get (node);
716 if (!snode)
717 return;
718 }
719 else if (TREE_CODE (node) == VAR_DECL)
720 snode = varpool_node::get_create (node);
721 else
722 snode = cgraph_node::get_create (node);
723 snode->set_section (value);
724 }
725
726 /* Return TLS model of a variable NODE. */
727 enum tls_model
728 decl_tls_model (const_tree node)
729 {
730 struct varpool_node *snode = varpool_node::get (node);
731 if (!snode)
732 return TLS_MODEL_NONE;
733 return snode->tls_model;
734 }
735
736 /* Set TLS model of variable NODE to MODEL. */
737 void
738 set_decl_tls_model (tree node, enum tls_model model)
739 {
740 struct varpool_node *vnode;
741
742 if (model == TLS_MODEL_NONE)
743 {
744 vnode = varpool_node::get (node);
745 if (!vnode)
746 return;
747 }
748 else
749 vnode = varpool_node::get_create (node);
750 vnode->tls_model = model;
751 }
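
/* Usage sketch for the symtab-backed accessors above (DECL and the section
   name are only examples; the setters create the varpool/cgraph node on
   demand, while the getters return NULL / TLS_MODEL_NONE when none exists):

     set_decl_section_name (decl, ".my_section");
     const char *sec = decl_section_name (decl);
     set_decl_tls_model (decl, TLS_MODEL_INITIAL_EXEC);
*/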
752
753 /* Compute the number of bytes occupied by a tree with code CODE.
754 This function cannot be used for nodes that have variable sizes,
755 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
756 size_t
757 tree_code_size (enum tree_code code)
758 {
759 switch (TREE_CODE_CLASS (code))
760 {
761 case tcc_declaration: /* A decl node */
762 {
763 switch (code)
764 {
765 case FIELD_DECL:
766 return sizeof (struct tree_field_decl);
767 case PARM_DECL:
768 return sizeof (struct tree_parm_decl);
769 case VAR_DECL:
770 return sizeof (struct tree_var_decl);
771 case LABEL_DECL:
772 return sizeof (struct tree_label_decl);
773 case RESULT_DECL:
774 return sizeof (struct tree_result_decl);
775 case CONST_DECL:
776 return sizeof (struct tree_const_decl);
777 case TYPE_DECL:
778 return sizeof (struct tree_type_decl);
779 case FUNCTION_DECL:
780 return sizeof (struct tree_function_decl);
781 case DEBUG_EXPR_DECL:
782 return sizeof (struct tree_decl_with_rtl);
783 case TRANSLATION_UNIT_DECL:
784 return sizeof (struct tree_translation_unit_decl);
785 case NAMESPACE_DECL:
786 case IMPORTED_DECL:
787 case NAMELIST_DECL:
788 return sizeof (struct tree_decl_non_common);
789 default:
790 return lang_hooks.tree_size (code);
791 }
792 }
793
794 case tcc_type: /* a type node */
795 return sizeof (struct tree_type_non_common);
796
797 case tcc_reference: /* a reference */
798 case tcc_expression: /* an expression */
799 case tcc_statement: /* an expression with side effects */
800 case tcc_comparison: /* a comparison expression */
801 case tcc_unary: /* a unary arithmetic expression */
802 case tcc_binary: /* a binary arithmetic expression */
803 return (sizeof (struct tree_exp)
804 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
805
806 case tcc_constant: /* a constant */
807 switch (code)
808 {
809 case VOID_CST: return sizeof (struct tree_typed);
810 case INTEGER_CST: gcc_unreachable ();
811 case REAL_CST: return sizeof (struct tree_real_cst);
812 case FIXED_CST: return sizeof (struct tree_fixed_cst);
813 case COMPLEX_CST: return sizeof (struct tree_complex);
814 case VECTOR_CST: return sizeof (struct tree_vector);
815 case STRING_CST: gcc_unreachable ();
816 default:
817 return lang_hooks.tree_size (code);
818 }
819
820 case tcc_exceptional: /* something random, like an identifier. */
821 switch (code)
822 {
823 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
824 case TREE_LIST: return sizeof (struct tree_list);
825
826 case ERROR_MARK:
827 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
828
829 case TREE_VEC:
830 case OMP_CLAUSE: gcc_unreachable ();
831
832 case SSA_NAME: return sizeof (struct tree_ssa_name);
833
834 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
835 case BLOCK: return sizeof (struct tree_block);
836 case CONSTRUCTOR: return sizeof (struct tree_constructor);
837 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
838 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
839
840 default:
841 return lang_hooks.tree_size (code);
842 }
843
844 default:
845 gcc_unreachable ();
846 }
847 }
848
849 /* Compute the number of bytes occupied by NODE. This routine only
850 looks at TREE_CODE, except for those nodes that have variable sizes. */
851 size_t
852 tree_size (const_tree node)
853 {
854 const enum tree_code code = TREE_CODE (node);
855 switch (code)
856 {
857 case INTEGER_CST:
858 return (sizeof (struct tree_int_cst)
859 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
860
861 case TREE_BINFO:
862 return (offsetof (struct tree_binfo, base_binfos)
863 + vec<tree, va_gc>
864 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
865
866 case TREE_VEC:
867 return (sizeof (struct tree_vec)
868 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
869
870 case VECTOR_CST:
871 return (sizeof (struct tree_vector)
872 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
873
874 case STRING_CST:
875 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
876
877 case OMP_CLAUSE:
878 return (sizeof (struct tree_omp_clause)
879 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
880 * sizeof (tree));
881
882 default:
883 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
884 return (sizeof (struct tree_exp)
885 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
886 else
887 return tree_code_size (code);
888 }
889 }
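
/* Worked examples of the size computations above:

     tree_code_size (PLUS_EXPR)
       == sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)

     tree_size (v), for a TREE_VEC v with TREE_VEC_LENGTH (v) == 3,
       == sizeof (struct tree_vec) + 2 * sizeof (tree)

   i.e. each variable-sized struct embeds one trailing element and the
   remaining elements are allocated directly after it.  */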
890
891 /* Record interesting allocation statistics for a tree node with CODE
892 and LENGTH. */
893
894 static void
895 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
896 size_t length ATTRIBUTE_UNUSED)
897 {
898 enum tree_code_class type = TREE_CODE_CLASS (code);
899 tree_node_kind kind;
900
901 if (!GATHER_STATISTICS)
902 return;
903
904 switch (type)
905 {
906 case tcc_declaration: /* A decl node */
907 kind = d_kind;
908 break;
909
910 case tcc_type: /* a type node */
911 kind = t_kind;
912 break;
913
914 case tcc_statement: /* an expression with side effects */
915 kind = s_kind;
916 break;
917
918 case tcc_reference: /* a reference */
919 kind = r_kind;
920 break;
921
922 case tcc_expression: /* an expression */
923 case tcc_comparison: /* a comparison expression */
924 case tcc_unary: /* a unary arithmetic expression */
925 case tcc_binary: /* a binary arithmetic expression */
926 kind = e_kind;
927 break;
928
929 case tcc_constant: /* a constant */
930 kind = c_kind;
931 break;
932
933 case tcc_exceptional: /* something random, like an identifier. */
934 switch (code)
935 {
936 case IDENTIFIER_NODE:
937 kind = id_kind;
938 break;
939
940 case TREE_VEC:
941 kind = vec_kind;
942 break;
943
944 case TREE_BINFO:
945 kind = binfo_kind;
946 break;
947
948 case SSA_NAME:
949 kind = ssa_name_kind;
950 break;
951
952 case BLOCK:
953 kind = b_kind;
954 break;
955
956 case CONSTRUCTOR:
957 kind = constr_kind;
958 break;
959
960 case OMP_CLAUSE:
961 kind = omp_clause_kind;
962 break;
963
964 default:
965 kind = x_kind;
966 break;
967 }
968 break;
969
970 case tcc_vl_exp:
971 kind = e_kind;
972 break;
973
974 default:
975 gcc_unreachable ();
976 }
977
978 tree_code_counts[(int) code]++;
979 tree_node_counts[(int) kind]++;
980 tree_node_sizes[(int) kind] += length;
981 }
982
983 /* Allocate and return a new UID from the DECL_UID namespace. */
984
985 int
986 allocate_decl_uid (void)
987 {
988 return next_decl_uid++;
989 }
990
991 /* Return a newly allocated node of code CODE. For decl and type
992 nodes, some other fields are initialized. The rest of the node is
993 initialized to zero. This function cannot be used for TREE_VEC,
994 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
995 tree_code_size.
996
997 Achoo! I got a code in the node. */
998
999 tree
1000 make_node_stat (enum tree_code code MEM_STAT_DECL)
1001 {
1002 tree t;
1003 enum tree_code_class type = TREE_CODE_CLASS (code);
1004 size_t length = tree_code_size (code);
1005
1006 record_node_allocation_statistics (code, length);
1007
1008 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1009 TREE_SET_CODE (t, code);
1010
1011 switch (type)
1012 {
1013 case tcc_statement:
1014 TREE_SIDE_EFFECTS (t) = 1;
1015 break;
1016
1017 case tcc_declaration:
1018 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1019 {
1020 if (code == FUNCTION_DECL)
1021 {
1022 SET_DECL_ALIGN (t, FUNCTION_BOUNDARY);
1023 DECL_MODE (t) = FUNCTION_MODE;
1024 }
1025 else
1026 SET_DECL_ALIGN (t, 1);
1027 }
1028 DECL_SOURCE_LOCATION (t) = input_location;
1029 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1030 DECL_UID (t) = --next_debug_decl_uid;
1031 else
1032 {
1033 DECL_UID (t) = allocate_decl_uid ();
1034 SET_DECL_PT_UID (t, -1);
1035 }
1036 if (TREE_CODE (t) == LABEL_DECL)
1037 LABEL_DECL_UID (t) = -1;
1038
1039 break;
1040
1041 case tcc_type:
1042 TYPE_UID (t) = next_type_uid++;
1043 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1044 TYPE_USER_ALIGN (t) = 0;
1045 TYPE_MAIN_VARIANT (t) = t;
1046 TYPE_CANONICAL (t) = t;
1047
1048 /* Default to no attributes for type, but let target change that. */
1049 TYPE_ATTRIBUTES (t) = NULL_TREE;
1050 targetm.set_default_type_attributes (t);
1051
1052 /* We have not yet computed the alias set for this type. */
1053 TYPE_ALIAS_SET (t) = -1;
1054 break;
1055
1056 case tcc_constant:
1057 TREE_CONSTANT (t) = 1;
1058 break;
1059
1060 case tcc_expression:
1061 switch (code)
1062 {
1063 case INIT_EXPR:
1064 case MODIFY_EXPR:
1065 case VA_ARG_EXPR:
1066 case PREDECREMENT_EXPR:
1067 case PREINCREMENT_EXPR:
1068 case POSTDECREMENT_EXPR:
1069 case POSTINCREMENT_EXPR:
1070 /* All of these have side-effects, no matter what their
1071 operands are. */
1072 TREE_SIDE_EFFECTS (t) = 1;
1073 break;
1074
1075 default:
1076 break;
1077 }
1078 break;
1079
1080 case tcc_exceptional:
1081 switch (code)
1082 {
1083 case TARGET_OPTION_NODE:
1084 TREE_TARGET_OPTION(t)
1085 = ggc_cleared_alloc<struct cl_target_option> ();
1086 break;
1087
1088 case OPTIMIZATION_NODE:
1089 TREE_OPTIMIZATION (t)
1090 = ggc_cleared_alloc<struct cl_optimization> ();
1091 break;
1092
1093 default:
1094 break;
1095 }
1096 break;
1097
1098 default:
1099 /* Other classes need no special treatment. */
1100 break;
1101 }
1102
1103 return t;
1104 }
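
/* A minimal usage sketch (front ends normally go through wrappers such as
   build_decl, build_int_cst or make_signed_type rather than calling
   make_node directly):

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 32;
     fixup_signed_type (t);

   where fixup_signed_type (stor-layout.c) fills in the min/max values and
   lays the type out.  make_node itself only zero-initializes the node and
   applies the class-specific defaults shown above.  */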
1105
1106 /* Free tree node. */
1107
1108 void
1109 free_node (tree node)
1110 {
1111 enum tree_code code = TREE_CODE (node);
1112 if (GATHER_STATISTICS)
1113 {
1114 tree_code_counts[(int) TREE_CODE (node)]--;
1115 tree_node_counts[(int) t_kind]--;
1116 tree_node_sizes[(int) t_kind] -= tree_size (node);
1117 }
1118 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1119 vec_free (CONSTRUCTOR_ELTS (node));
1120 else if (code == BLOCK)
1121 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1122 else if (code == TREE_BINFO)
1123 vec_free (BINFO_BASE_ACCESSES (node));
1124 ggc_free (node);
1125 }
1126 \f
1127 /* Return a new node with the same contents as NODE except that its
1128 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1129
1130 tree
1131 copy_node_stat (tree node MEM_STAT_DECL)
1132 {
1133 tree t;
1134 enum tree_code code = TREE_CODE (node);
1135 size_t length;
1136
1137 gcc_assert (code != STATEMENT_LIST);
1138
1139 length = tree_size (node);
1140 record_node_allocation_statistics (code, length);
1141 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1142 memcpy (t, node, length);
1143
1144 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1145 TREE_CHAIN (t) = 0;
1146 TREE_ASM_WRITTEN (t) = 0;
1147 TREE_VISITED (t) = 0;
1148
1149 if (TREE_CODE_CLASS (code) == tcc_declaration)
1150 {
1151 if (code == DEBUG_EXPR_DECL)
1152 DECL_UID (t) = --next_debug_decl_uid;
1153 else
1154 {
1155 DECL_UID (t) = allocate_decl_uid ();
1156 if (DECL_PT_UID_SET_P (node))
1157 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1158 }
1159 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1160 && DECL_HAS_VALUE_EXPR_P (node))
1161 {
1162 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1163 DECL_HAS_VALUE_EXPR_P (t) = 1;
1164 }
1165       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1166 if (TREE_CODE (node) == VAR_DECL)
1167 {
1168 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1169 t->decl_with_vis.symtab_node = NULL;
1170 }
1171 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1172 {
1173 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1174 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1175 }
1176 if (TREE_CODE (node) == FUNCTION_DECL)
1177 {
1178 DECL_STRUCT_FUNCTION (t) = NULL;
1179 t->decl_with_vis.symtab_node = NULL;
1180 }
1181 }
1182 else if (TREE_CODE_CLASS (code) == tcc_type)
1183 {
1184 TYPE_UID (t) = next_type_uid++;
1185 /* The following is so that the debug code for
1186 the copy is different from the original type.
1187 The two statements usually duplicate each other
1188 (because they clear fields of the same union),
1189 but the optimizer should catch that. */
1190 TYPE_SYMTAB_POINTER (t) = 0;
1191 TYPE_SYMTAB_ADDRESS (t) = 0;
1192
1193 /* Do not copy the values cache. */
1194 if (TYPE_CACHED_VALUES_P (t))
1195 {
1196 TYPE_CACHED_VALUES_P (t) = 0;
1197 TYPE_CACHED_VALUES (t) = NULL_TREE;
1198 }
1199 }
1200 else if (code == TARGET_OPTION_NODE)
1201 {
1202 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1203 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1204 sizeof (struct cl_target_option));
1205 }
1206 else if (code == OPTIMIZATION_NODE)
1207 {
1208 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1209 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1210 sizeof (struct cl_optimization));
1211 }
1212
1213 return t;
1214 }
1215
1216 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1217 For example, this can copy a list made of TREE_LIST nodes. */
1218
1219 tree
1220 copy_list (tree list)
1221 {
1222 tree head;
1223 tree prev, next;
1224
1225 if (list == 0)
1226 return 0;
1227
1228 head = prev = copy_node (list);
1229 next = TREE_CHAIN (list);
1230 while (next)
1231 {
1232 TREE_CHAIN (prev) = copy_node (next);
1233 prev = TREE_CHAIN (prev);
1234 next = TREE_CHAIN (next);
1235 }
1236 return head;
1237 }
1238
1239 \f
1240 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1241 INTEGER_CST with value CST and type TYPE. */
1242
1243 static unsigned int
1244 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1245 {
1246 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1247 /* We need extra HWIs if CST is an unsigned integer with its
1248 upper bit set. */
1249 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1250 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1251 return cst.get_len ();
1252 }
1253
1254 /* Return a new INTEGER_CST with value CST and type TYPE. */
1255
1256 static tree
1257 build_new_int_cst (tree type, const wide_int &cst)
1258 {
1259 unsigned int len = cst.get_len ();
1260 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1261 tree nt = make_int_cst (len, ext_len);
1262
1263 if (len < ext_len)
1264 {
1265 --ext_len;
1266 TREE_INT_CST_ELT (nt, ext_len)
1267 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1268 for (unsigned int i = len; i < ext_len; ++i)
1269 TREE_INT_CST_ELT (nt, i) = -1;
1270 }
1271 else if (TYPE_UNSIGNED (type)
1272 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1273 {
1274 len--;
1275 TREE_INT_CST_ELT (nt, len)
1276 = zext_hwi (cst.elt (len),
1277 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1278 }
1279
1280 for (unsigned int i = 0; i < len; i++)
1281 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1282 TREE_TYPE (nt) = type;
1283 return nt;
1284 }
1285
1286 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1287
1288 tree
1289 build_int_cst (tree type, HOST_WIDE_INT low)
1290 {
1291 /* Support legacy code. */
1292 if (!type)
1293 type = integer_type_node;
1294
1295 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1296 }
1297
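/* Create an INT_CST node with a LOW value zero extended to TYPE.  */
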
1298 tree
1299 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1300 {
1301 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1302 }
1303
1304 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1305
1306 tree
1307 build_int_cst_type (tree type, HOST_WIDE_INT low)
1308 {
1309 gcc_assert (type);
1310 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1311 }
1312
1313 /* Constructs a tree of type TYPE with the value given by CST.  The
1314    signedness of CST is assumed to be the same as that of TYPE.  */
1315
1316 tree
1317 double_int_to_tree (tree type, double_int cst)
1318 {
1319 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1320 }
1321
1322 /* We force the wide_int CST to the range of the type TYPE by sign or
1323    zero extending it.  OVERFLOWABLE indicates if we are interested in
1324    overflow of the value, when >0 we are only interested in signed
1325    overflow, for <0 we are interested in any overflow.  OVERFLOWED
1326    indicates whether overflow has already occurred.
1327    We force the returned value to be within the range of TYPE (by
1328    setting to 0 or 1 all the bits outside the type's range).
1329    We set TREE_OVERFLOW if
1330    OVERFLOWED is nonzero,
1331    or OVERFLOWABLE is >0 and signed overflow occurs,
1332    or OVERFLOWABLE is <0 and any overflow occurs.
1333    We return a new tree node for the extended wide_int.  The node
1334    is shared if no overflow flags are set.  */
1335
1336
1337 tree
1338 force_fit_type (tree type, const wide_int_ref &cst,
1339 int overflowable, bool overflowed)
1340 {
1341 signop sign = TYPE_SIGN (type);
1342
1343 /* If we need to set overflow flags, return a new unshared node. */
1344 if (overflowed || !wi::fits_to_tree_p (cst, type))
1345 {
1346 if (overflowed
1347 || overflowable < 0
1348 || (overflowable > 0 && sign == SIGNED))
1349 {
1350 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1351 tree t = build_new_int_cst (type, tmp);
1352 TREE_OVERFLOW (t) = 1;
1353 return t;
1354 }
1355 }
1356
1357 /* Else build a shared node. */
1358 return wide_int_to_tree (type, cst);
1359 }
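
/* Illustrative caller pattern (a sketch of what the integer constant
   folders in fold-const.c do; ARG1 and ARG2 are assumed to be
   INTEGER_CSTs of the same type):

     bool overflow = false;
     wide_int res = wi::add (arg1, arg2, TYPE_SIGN (type), &overflow);
     return force_fit_type (type, res, overflowable, overflow);
*/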
1360
1361 /* These are the hash table functions for the hash table of INTEGER_CST
1362    nodes.  */
1363
1364 /* Return the hash code X, an INTEGER_CST. */
1365
1366 hashval_t
1367 int_cst_hasher::hash (tree x)
1368 {
1369 const_tree const t = x;
1370 hashval_t code = TYPE_UID (TREE_TYPE (t));
1371 int i;
1372
1373 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1374 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1375
1376 return code;
1377 }
1378
1379 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1380    is the same as that represented by Y, also an INTEGER_CST tree node.  */
1381
1382 bool
1383 int_cst_hasher::equal (tree x, tree y)
1384 {
1385 const_tree const xt = x;
1386 const_tree const yt = y;
1387
1388 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1389 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1390 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1391 return false;
1392
1393 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1394 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1395 return false;
1396
1397 return true;
1398 }
1399
1400 /* Create an INT_CST node of TYPE and value CST.
1401 The returned node is always shared. For small integers we use a
1402 per-type vector cache, for larger ones we use a single hash table.
1403 The value is extended from its precision according to the sign of
1404 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1405 the upper bits and ensures that hashing and value equality based
1406 upon the underlying HOST_WIDE_INTs works without masking. */
1407
1408 tree
1409 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1410 {
1411 tree t;
1412 int ix = -1;
1413 int limit = 0;
1414
1415 gcc_assert (type);
1416 unsigned int prec = TYPE_PRECISION (type);
1417 signop sgn = TYPE_SIGN (type);
1418
1419 /* Verify that everything is canonical. */
1420 int l = pcst.get_len ();
1421 if (l > 1)
1422 {
1423 if (pcst.elt (l - 1) == 0)
1424 gcc_checking_assert (pcst.elt (l - 2) < 0);
1425 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1426 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1427 }
1428
1429 wide_int cst = wide_int::from (pcst, prec, sgn);
1430 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1431
1432 if (ext_len == 1)
1433 {
1434 /* We just need to store a single HOST_WIDE_INT. */
1435 HOST_WIDE_INT hwi;
1436 if (TYPE_UNSIGNED (type))
1437 hwi = cst.to_uhwi ();
1438 else
1439 hwi = cst.to_shwi ();
1440
1441 switch (TREE_CODE (type))
1442 {
1443 case NULLPTR_TYPE:
1444 gcc_assert (hwi == 0);
1445 /* Fallthru. */
1446
1447 case POINTER_TYPE:
1448 case REFERENCE_TYPE:
1449 case POINTER_BOUNDS_TYPE:
1450 /* Cache NULL pointer and zero bounds. */
1451 if (hwi == 0)
1452 {
1453 limit = 1;
1454 ix = 0;
1455 }
1456 break;
1457
1458 case BOOLEAN_TYPE:
1459 /* Cache false or true. */
1460 limit = 2;
1461 if (IN_RANGE (hwi, 0, 1))
1462 ix = hwi;
1463 break;
1464
1465 case INTEGER_TYPE:
1466 case OFFSET_TYPE:
1467 if (TYPE_SIGN (type) == UNSIGNED)
1468 {
1469 /* Cache [0, N). */
1470 limit = INTEGER_SHARE_LIMIT;
1471 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1472 ix = hwi;
1473 }
1474 else
1475 {
1476 /* Cache [-1, N). */
1477 limit = INTEGER_SHARE_LIMIT + 1;
1478 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1479 ix = hwi + 1;
1480 }
1481 break;
1482
1483 case ENUMERAL_TYPE:
1484 break;
1485
1486 default:
1487 gcc_unreachable ();
1488 }
1489
1490 if (ix >= 0)
1491 {
1492 /* Look for it in the type's vector of small shared ints. */
1493 if (!TYPE_CACHED_VALUES_P (type))
1494 {
1495 TYPE_CACHED_VALUES_P (type) = 1;
1496 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1497 }
1498
1499 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1500 if (t)
1501 /* Make sure no one is clobbering the shared constant. */
1502 gcc_checking_assert (TREE_TYPE (t) == type
1503 && TREE_INT_CST_NUNITS (t) == 1
1504 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1505 && TREE_INT_CST_EXT_NUNITS (t) == 1
1506 && TREE_INT_CST_ELT (t, 0) == hwi);
1507 else
1508 {
1509 /* Create a new shared int. */
1510 t = build_new_int_cst (type, cst);
1511 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1512 }
1513 }
1514 else
1515 {
1516 /* Use the cache of larger shared ints, using int_cst_node as
1517 a temporary. */
1518
1519 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1520 TREE_TYPE (int_cst_node) = type;
1521
1522 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1523 t = *slot;
1524 if (!t)
1525 {
1526 /* Insert this one into the hash table. */
1527 t = int_cst_node;
1528 *slot = t;
1529 /* Make a new node for next time round. */
1530 int_cst_node = make_int_cst (1, 1);
1531 }
1532 }
1533 }
1534 else
1535 {
1536 /* The value either hashes properly or we drop it on the floor
1537 for the gc to take care of. There will not be enough of them
1538 to worry about. */
1539
1540 tree nt = build_new_int_cst (type, cst);
1541 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1542 t = *slot;
1543 if (!t)
1544 {
1545 /* Insert this one into the hash table. */
1546 t = nt;
1547 *slot = t;
1548 }
1549 }
1550
1551 return t;
1552 }
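
/* For example, constants that fall into the small-value window come back
   pointer-identical:

     tree a = build_int_cst (integer_type_node, 5);
     tree b = build_int_cst (integer_type_node, 5);
     gcc_checking_assert (a == b);

   the second call is served from TYPE_CACHED_VALUES; larger values (and
   ENUMERAL_TYPE constants) are shared through int_cst_hash_table instead.  */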
1553
1554 void
1555 cache_integer_cst (tree t)
1556 {
1557 tree type = TREE_TYPE (t);
1558 int ix = -1;
1559 int limit = 0;
1560 int prec = TYPE_PRECISION (type);
1561
1562 gcc_assert (!TREE_OVERFLOW (t));
1563
1564 switch (TREE_CODE (type))
1565 {
1566 case NULLPTR_TYPE:
1567 gcc_assert (integer_zerop (t));
1568 /* Fallthru. */
1569
1570 case POINTER_TYPE:
1571 case REFERENCE_TYPE:
1572 /* Cache NULL pointer. */
1573 if (integer_zerop (t))
1574 {
1575 limit = 1;
1576 ix = 0;
1577 }
1578 break;
1579
1580 case BOOLEAN_TYPE:
1581 /* Cache false or true. */
1582 limit = 2;
1583 if (wi::ltu_p (t, 2))
1584 ix = TREE_INT_CST_ELT (t, 0);
1585 break;
1586
1587 case INTEGER_TYPE:
1588 case OFFSET_TYPE:
1589 if (TYPE_UNSIGNED (type))
1590 {
1591 /* Cache 0..N */
1592 limit = INTEGER_SHARE_LIMIT;
1593
1594 	  /* This is a little hokey, but if the prec is smaller than
1595 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1596 obvious test will not get the correct answer. */
1597 if (prec < HOST_BITS_PER_WIDE_INT)
1598 {
1599 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1600 ix = tree_to_uhwi (t);
1601 }
1602 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1603 ix = tree_to_uhwi (t);
1604 }
1605 else
1606 {
1607 /* Cache -1..N */
1608 limit = INTEGER_SHARE_LIMIT + 1;
1609
1610 if (integer_minus_onep (t))
1611 ix = 0;
1612 else if (!wi::neg_p (t))
1613 {
1614 if (prec < HOST_BITS_PER_WIDE_INT)
1615 {
1616 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1617 ix = tree_to_shwi (t) + 1;
1618 }
1619 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1620 ix = tree_to_shwi (t) + 1;
1621 }
1622 }
1623 break;
1624
1625 case ENUMERAL_TYPE:
1626 break;
1627
1628 default:
1629 gcc_unreachable ();
1630 }
1631
1632 if (ix >= 0)
1633 {
1634 /* Look for it in the type's vector of small shared ints. */
1635 if (!TYPE_CACHED_VALUES_P (type))
1636 {
1637 TYPE_CACHED_VALUES_P (type) = 1;
1638 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1639 }
1640
1641 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1642 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1643 }
1644 else
1645 {
1646 /* Use the cache of larger shared ints. */
1647 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1648 /* If there is already an entry for the number verify it's the
1649 same. */
1650 if (*slot)
1651 gcc_assert (wi::eq_p (tree (*slot), t));
1652 else
1653 /* Otherwise insert this one into the hash table. */
1654 *slot = t;
1655 }
1656 }
1657
1658
1659 /* Builds an integer constant in TYPE such that the lowest BITS bits are
1660    ones and the rest are zeros.  */
1661
1662 tree
1663 build_low_bits_mask (tree type, unsigned bits)
1664 {
1665 gcc_assert (bits <= TYPE_PRECISION (type));
1666
1667 return wide_int_to_tree (type, wi::mask (bits, false,
1668 TYPE_PRECISION (type)));
1669 }
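
/* E.g. build_low_bits_mask (unsigned_type_node, 3) yields the constant 7,
   and BITS equal to TYPE_PRECISION (TYPE) yields the all-ones value.  */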
1670
1671 /* Checks that X is an integer constant that can be expressed in an
1672    (unsigned) HOST_WIDE_INT without loss of precision.  */
1673
1674 bool
1675 cst_and_fits_in_hwi (const_tree x)
1676 {
1677 return (TREE_CODE (x) == INTEGER_CST
1678 && TYPE_PRECISION (TREE_TYPE (x)) <= HOST_BITS_PER_WIDE_INT);
1679 }
1680
1681 /* Build a newly constructed VECTOR_CST node of length LEN. */
1682
1683 tree
1684 make_vector_stat (unsigned len MEM_STAT_DECL)
1685 {
1686 tree t;
1687 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1688
1689 record_node_allocation_statistics (VECTOR_CST, length);
1690
1691 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1692
1693 TREE_SET_CODE (t, VECTOR_CST);
1694 TREE_CONSTANT (t) = 1;
1695
1696 return t;
1697 }
1698
1699 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1700 are in a list pointed to by VALS. */
1701
1702 tree
1703 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1704 {
1705 int over = 0;
1706 unsigned cnt = 0;
1707 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1708 TREE_TYPE (v) = type;
1709
1710 /* Iterate through elements and check for overflow. */
1711 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1712 {
1713 tree value = vals[cnt];
1714
1715 VECTOR_CST_ELT (v, cnt) = value;
1716
1717 /* Don't crash if we get an address constant. */
1718 if (!CONSTANT_CLASS_P (value))
1719 continue;
1720
1721 over |= TREE_OVERFLOW (value);
1722 }
1723
1724 TREE_OVERFLOW (v) = over;
1725 return v;
1726 }
1727
1728 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1729 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1730
1731 tree
1732 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1733 {
1734 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1735 unsigned HOST_WIDE_INT idx, pos = 0;
1736 tree value;
1737
1738 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1739 {
1740 if (TREE_CODE (value) == VECTOR_CST)
1741 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1742 vec[pos++] = VECTOR_CST_ELT (value, i);
1743 else
1744 vec[pos++] = value;
1745 }
1746 while (pos < TYPE_VECTOR_SUBPARTS (type))
1747 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1748
1749 return build_vector (type, vec);
1750 }
1751
1752 /* Build a vector of type VECTYPE where all the elements are SCs. */
1753 tree
1754 build_vector_from_val (tree vectype, tree sc)
1755 {
1756 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1757
1758 if (sc == error_mark_node)
1759 return sc;
1760
1761 /* Verify that the vector type is suitable for SC. Note that there
1762 is some inconsistency in the type-system with respect to restrict
1763 qualifications of pointers. Vector types always have a main-variant
1764 element type and the qualification is applied to the vector-type.
1765 So TREE_TYPE (vector-type) does not return a properly qualified
1766 vector element-type. */
1767 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1768 TREE_TYPE (vectype)));
1769
1770 if (CONSTANT_CLASS_P (sc))
1771 {
1772 tree *v = XALLOCAVEC (tree, nunits);
1773 for (i = 0; i < nunits; ++i)
1774 v[i] = sc;
1775 return build_vector (vectype, v);
1776 }
1777 else
1778 {
1779 vec<constructor_elt, va_gc> *v;
1780 vec_alloc (v, nunits);
1781 for (i = 0; i < nunits; ++i)
1782 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1783 return build_constructor (vectype, v);
1784 }
1785 }
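
/* Usage sketch, assuming VECTYPE was built elsewhere, e.g. with
   build_vector_type (integer_type_node, 4):

     tree ones = build_vector_from_val (vectype,
                                        build_one_cst (TREE_TYPE (vectype)));

   A constant element yields a VECTOR_CST; a non-constant element falls
   back to a CONSTRUCTOR as shown above.  */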
1786
1787 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1788 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1789
1790 void
1791 recompute_constructor_flags (tree c)
1792 {
1793 unsigned int i;
1794 tree val;
1795 bool constant_p = true;
1796 bool side_effects_p = false;
1797 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1798
1799 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1800 {
1801 /* Mostly ctors will have elts that don't have side-effects, so
1802 the usual case is to scan all the elements. Hence a single
1803 loop for both const and side effects, rather than one loop
1804 each (with early outs). */
1805 if (!TREE_CONSTANT (val))
1806 constant_p = false;
1807 if (TREE_SIDE_EFFECTS (val))
1808 side_effects_p = true;
1809 }
1810
1811 TREE_SIDE_EFFECTS (c) = side_effects_p;
1812 TREE_CONSTANT (c) = constant_p;
1813 }
1814
1815 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1816 CONSTRUCTOR C. */
1817
1818 void
1819 verify_constructor_flags (tree c)
1820 {
1821 unsigned int i;
1822 tree val;
1823 bool constant_p = TREE_CONSTANT (c);
1824 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1825 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1826
1827 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1828 {
1829 if (constant_p && !TREE_CONSTANT (val))
1830 internal_error ("non-constant element in constant CONSTRUCTOR");
1831 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1832 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1833 }
1834 }
1835
1836 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1837 are in the vec pointed to by VALS. */
1838 tree
1839 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1840 {
1841 tree c = make_node (CONSTRUCTOR);
1842
1843 TREE_TYPE (c) = type;
1844 CONSTRUCTOR_ELTS (c) = vals;
1845
1846 recompute_constructor_flags (c);
1847
1848 return c;
1849 }
1850
1851 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1852 INDEX and VALUE. */
1853 tree
1854 build_constructor_single (tree type, tree index, tree value)
1855 {
1856 vec<constructor_elt, va_gc> *v;
1857 constructor_elt elt = {index, value};
1858
1859 vec_alloc (v, 1);
1860 v->quick_push (elt);
1861
1862 return build_constructor (type, v);
1863 }
1864
1865
1866 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1867 are in a list pointed to by VALS. */
1868 tree
1869 build_constructor_from_list (tree type, tree vals)
1870 {
1871 tree t;
1872 vec<constructor_elt, va_gc> *v = NULL;
1873
1874 if (vals)
1875 {
1876 vec_alloc (v, list_length (vals));
1877 for (t = vals; t; t = TREE_CHAIN (t))
1878 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1879 }
1880
1881 return build_constructor (type, v);
1882 }
1883
1884 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1885 of elements, provided as index/value pairs. */
1886
1887 tree
1888 build_constructor_va (tree type, int nelts, ...)
1889 {
1890 vec<constructor_elt, va_gc> *v = NULL;
1891 va_list p;
1892
1893 va_start (p, nelts);
1894 vec_alloc (v, nelts);
1895 while (nelts--)
1896 {
1897 tree index = va_arg (p, tree);
1898 tree value = va_arg (p, tree);
1899 CONSTRUCTOR_APPEND_ELT (v, index, value);
1900 }
1901 va_end (p);
1902 return build_constructor (type, v);
1903 }
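
/* For example, an initializer corresponding to { [0] = a, [1] = b } can be
   built as

     tree ctor = build_constructor_va (array_type, 2,
                                       size_int (0), a,
                                       size_int (1), b);

   where ARRAY_TYPE, A and B are assumed to have been created already.  */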
1904
1905 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1906
1907 tree
1908 build_fixed (tree type, FIXED_VALUE_TYPE f)
1909 {
1910 tree v;
1911 FIXED_VALUE_TYPE *fp;
1912
1913 v = make_node (FIXED_CST);
1914 fp = ggc_alloc<fixed_value> ();
1915 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1916
1917 TREE_TYPE (v) = type;
1918 TREE_FIXED_CST_PTR (v) = fp;
1919 return v;
1920 }
1921
1922 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1923
1924 tree
1925 build_real (tree type, REAL_VALUE_TYPE d)
1926 {
1927 tree v;
1928 REAL_VALUE_TYPE *dp;
1929 int overflow = 0;
1930
1931 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1932 Consider doing it via real_convert now. */
1933
1934 v = make_node (REAL_CST);
1935 dp = ggc_alloc<real_value> ();
1936 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1937
1938 TREE_TYPE (v) = type;
1939 TREE_REAL_CST_PTR (v) = dp;
1940 TREE_OVERFLOW (v) = overflow;
1941 return v;
1942 }
1943
1944 /* Like build_real, but first truncate D to the type. */
1945
1946 tree
1947 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1948 {
1949 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1950 }
1951
1952 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1953    node I, converted to the mode of the floating-point type TYPE.  */
1954
1955 REAL_VALUE_TYPE
1956 real_value_from_int_cst (const_tree type, const_tree i)
1957 {
1958 REAL_VALUE_TYPE d;
1959
1960 /* Clear all bits of the real value type so that we can later do
1961 bitwise comparisons to see if two values are the same. */
1962 memset (&d, 0, sizeof d);
1963
1964 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1965 TYPE_SIGN (TREE_TYPE (i)));
1966 return d;
1967 }
1968
1969 /* Given a tree representing an integer constant I, return a tree
1970 representing the same value as a floating-point constant of type TYPE. */
1971
1972 tree
1973 build_real_from_int_cst (tree type, const_tree i)
1974 {
1975 tree v;
1976 int overflow = TREE_OVERFLOW (i);
1977
1978 v = build_real (type, real_value_from_int_cst (type, i));
1979
1980 TREE_OVERFLOW (v) |= overflow;
1981 return v;
1982 }
1983
1984 /* Return a newly constructed STRING_CST node whose value is
1985 the LEN characters at STR.
1986 Note that for a C string literal, LEN should include the trailing NUL.
1987 The TREE_TYPE is not initialized. */
1988
1989 tree
1990 build_string (int len, const char *str)
1991 {
1992 tree s;
1993 size_t length;
1994
1995 /* Do not waste bytes provided by padding of struct tree_string. */
1996 length = len + offsetof (struct tree_string, str) + 1;
1997
1998 record_node_allocation_statistics (STRING_CST, length);
1999
2000 s = (tree) ggc_internal_alloc (length);
2001
2002 memset (s, 0, sizeof (struct tree_typed));
2003 TREE_SET_CODE (s, STRING_CST);
2004 TREE_CONSTANT (s) = 1;
2005 TREE_STRING_LENGTH (s) = len;
2006 memcpy (s->string.str, str, len);
2007 s->string.str[len] = '\0';
2008
2009 return s;
2010 }
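
/* E.g. for the C literal "hi", LEN counts the trailing NUL and the caller
   is responsible for the type:

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));

   (front ends normally use their own wrappers which also set the type).  */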
2011
2012 /* Return a newly constructed COMPLEX_CST node whose value is
2013 specified by the real and imaginary parts REAL and IMAG.
2014 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2015 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2016
2017 tree
2018 build_complex (tree type, tree real, tree imag)
2019 {
2020 tree t = make_node (COMPLEX_CST);
2021
2022 TREE_REALPART (t) = real;
2023 TREE_IMAGPART (t) = imag;
2024 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2025 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2026 return t;
2027 }
2028
2029 /* Build a complex (inf +- 0i), such as for the result of cproj.
2030 TYPE is the complex tree type of the result. If NEG is true, the
2031 imaginary zero is negative. */
2032
2033 tree
2034 build_complex_inf (tree type, bool neg)
2035 {
2036 REAL_VALUE_TYPE rinf, rzero = dconst0;
2037
2038 real_inf (&rinf);
2039 rzero.sign = neg;
2040 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2041 build_real (TREE_TYPE (type), rzero));
2042 }
2043
2044 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2045 element is set to 1. In particular, this is 1 + i for complex types. */
2046
2047 tree
2048 build_each_one_cst (tree type)
2049 {
2050 if (TREE_CODE (type) == COMPLEX_TYPE)
2051 {
2052 tree scalar = build_one_cst (TREE_TYPE (type));
2053 return build_complex (type, scalar, scalar);
2054 }
2055 else
2056 return build_one_cst (type);
2057 }
2058
2059 /* Return a constant of arithmetic type TYPE which is the
2060 multiplicative identity of the set TYPE. */
2061
2062 tree
2063 build_one_cst (tree type)
2064 {
2065 switch (TREE_CODE (type))
2066 {
2067 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2068 case POINTER_TYPE: case REFERENCE_TYPE:
2069 case OFFSET_TYPE:
2070 return build_int_cst (type, 1);
2071
2072 case REAL_TYPE:
2073 return build_real (type, dconst1);
2074
2075 case FIXED_POINT_TYPE:
2076 /* We can only generate 1 for accum types. */
2077 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2078 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_one_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 return build_complex (type,
2089 build_one_cst (TREE_TYPE (type)),
2090 build_zero_cst (TREE_TYPE (type)));
2091
2092 default:
2093 gcc_unreachable ();
2094 }
2095 }
2096
2097 /* Return an integer of type TYPE containing all 1's in as much precision as
2098 it contains, or a complex or vector whose subparts are such integers. */
2099
2100 tree
2101 build_all_ones_cst (tree type)
2102 {
2103 if (TREE_CODE (type) == COMPLEX_TYPE)
2104 {
2105 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2106 return build_complex (type, scalar, scalar);
2107 }
2108 else
2109 return build_minus_one_cst (type);
2110 }
2111
2112 /* Return a constant of arithmetic type TYPE which is the
2113 opposite of the multiplicative identity of the set TYPE. */
2114
2115 tree
2116 build_minus_one_cst (tree type)
2117 {
2118 switch (TREE_CODE (type))
2119 {
2120 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2121 case POINTER_TYPE: case REFERENCE_TYPE:
2122 case OFFSET_TYPE:
2123 return build_int_cst (type, -1);
2124
2125 case REAL_TYPE:
2126 return build_real (type, dconstm1);
2127
2128 case FIXED_POINT_TYPE:
2129 /* We can only generate -1 for accum types. */
2130 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2131 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2132 TYPE_MODE (type)));
2133
2134 case VECTOR_TYPE:
2135 {
2136 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2137
2138 return build_vector_from_val (type, scalar);
2139 }
2140
2141 case COMPLEX_TYPE:
2142 return build_complex (type,
2143 build_minus_one_cst (TREE_TYPE (type)),
2144 build_zero_cst (TREE_TYPE (type)));
2145
2146 default:
2147 gcc_unreachable ();
2148 }
2149 }
2150
2151 /* Build 0 constant of type TYPE. This is used by constructor folding
2152 and thus the constant should be represented in memory by
2153 zero(es). */
2154
2155 tree
2156 build_zero_cst (tree type)
2157 {
2158 switch (TREE_CODE (type))
2159 {
2160 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2161 case POINTER_TYPE: case REFERENCE_TYPE:
2162 case OFFSET_TYPE: case NULLPTR_TYPE:
2163 return build_int_cst (type, 0);
2164
2165 case REAL_TYPE:
2166 return build_real (type, dconst0);
2167
2168 case FIXED_POINT_TYPE:
2169 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2170
2171 case VECTOR_TYPE:
2172 {
2173 tree scalar = build_zero_cst (TREE_TYPE (type));
2174
2175 return build_vector_from_val (type, scalar);
2176 }
2177
2178 case COMPLEX_TYPE:
2179 {
2180 tree zero = build_zero_cst (TREE_TYPE (type));
2181
2182 return build_complex (type, zero, zero);
2183 }
2184
2185 default:
2186 if (!AGGREGATE_TYPE_P (type))
2187 return fold_convert (type, integer_zero_node);
2188 return build_constructor (type, NULL);
2189 }
2190 }
2191
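/* Usage sketch (editorial illustration, not part of GCC): the build_*_cst
   helpers above recurse through composite types, so for instance

     tree v4si = build_vector_type (integer_type_node, 4);
     tree zero = build_zero_cst (v4si);       -- VECTOR_CST {0, 0, 0, 0}
     tree ones = build_all_ones_cst (v4si);   -- VECTOR_CST {-1, -1, -1, -1}
     tree cone = build_one_cst (build_complex_type (integer_type_node));
                                               -- COMPLEX_CST 1 + 0i

   build_vector_type and build_complex_type are the usual type builders
   declared in tree.h.  */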
2192
2193 /* Build a BINFO with LEN language slots. */
2194
2195 tree
2196 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2197 {
2198 tree t;
2199 size_t length = (offsetof (struct tree_binfo, base_binfos)
2200 + vec<tree, va_gc>::embedded_size (base_binfos));
2201
2202 record_node_allocation_statistics (TREE_BINFO, length);
2203
2204 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2205
2206 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2207
2208 TREE_SET_CODE (t, TREE_BINFO);
2209
2210 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2211
2212 return t;
2213 }
2214
2215 /* Create a CASE_LABEL_EXPR tree node and return it. */
2216
2217 tree
2218 build_case_label (tree low_value, tree high_value, tree label_decl)
2219 {
2220 tree t = make_node (CASE_LABEL_EXPR);
2221
2222 TREE_TYPE (t) = void_type_node;
2223 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2224
2225 CASE_LOW (t) = low_value;
2226 CASE_HIGH (t) = high_value;
2227 CASE_LABEL (t) = label_decl;
2228 CASE_CHAIN (t) = NULL_TREE;
2229
2230 return t;
2231 }
2232
2233 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2234 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2235 The latter determines the length of the HOST_WIDE_INT vector. */
2236
2237 tree
2238 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2239 {
2240 tree t;
2241 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2242 + sizeof (struct tree_int_cst));
2243
2244 gcc_assert (len);
2245 record_node_allocation_statistics (INTEGER_CST, length);
2246
2247 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2248
2249 TREE_SET_CODE (t, INTEGER_CST);
2250 TREE_INT_CST_NUNITS (t) = len;
2251 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2252 /* to_offset can only be applied to trees that are offset_int-sized
2253 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2254 must be exactly the precision of offset_int and so LEN is correct. */
2255 if (ext_len <= OFFSET_INT_ELTS)
2256 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2257 else
2258 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2259
2260 TREE_CONSTANT (t) = 1;
2261
2262 return t;
2263 }
2264
2265 /* Build a newly constructed TREE_VEC node of length LEN. */
2266
2267 tree
2268 make_tree_vec_stat (int len MEM_STAT_DECL)
2269 {
2270 tree t;
2271 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2272
2273 record_node_allocation_statistics (TREE_VEC, length);
2274
2275 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2276
2277 TREE_SET_CODE (t, TREE_VEC);
2278 TREE_VEC_LENGTH (t) = len;
2279
2280 return t;
2281 }
2282
2283 /* Grow a TREE_VEC node to new length LEN. */
2284
2285 tree
2286 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2287 {
2288 gcc_assert (TREE_CODE (v) == TREE_VEC);
2289
2290 int oldlen = TREE_VEC_LENGTH (v);
2291 gcc_assert (len > oldlen);
2292
2293 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2294 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2295
2296 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2297
2298 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2299
2300 TREE_VEC_LENGTH (v) = len;
2301
2302 return v;
2303 }
2304 \f
2305 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2306 fixed, and scalar, complex or vector. */
2307
2308 int
2309 zerop (const_tree expr)
2310 {
2311 return (integer_zerop (expr)
2312 || real_zerop (expr)
2313 || fixed_zerop (expr));
2314 }
2315
2316 /* Return 1 if EXPR is the integer constant zero or a complex constant
2317 of zero. */
2318
2319 int
2320 integer_zerop (const_tree expr)
2321 {
2322 switch (TREE_CODE (expr))
2323 {
2324 case INTEGER_CST:
2325 return wi::eq_p (expr, 0);
2326 case COMPLEX_CST:
2327 return (integer_zerop (TREE_REALPART (expr))
2328 && integer_zerop (TREE_IMAGPART (expr)));
2329 case VECTOR_CST:
2330 {
2331 unsigned i;
2332 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2333 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2334 return false;
2335 return true;
2336 }
2337 default:
2338 return false;
2339 }
2340 }
2341
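/* Editorial example (not part of GCC): integer_zerop looks through
   composite constants but only at integer pieces, e.g.

     integer_zerop (integer_zero_node)                           -> 1
     integer_zerop (build_zero_cst (build_complex_type
                                      (integer_type_node)))      -> 1
     integer_zerop (build_real (double_type_node, dconst0))      -> 0

   the last case being a REAL_CST, which real_zerop handles instead.  */
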
2342 /* Return 1 if EXPR is the integer constant one or the corresponding
2343 complex constant. */
2344
2345 int
2346 integer_onep (const_tree expr)
2347 {
2348 switch (TREE_CODE (expr))
2349 {
2350 case INTEGER_CST:
2351 return wi::eq_p (wi::to_widest (expr), 1);
2352 case COMPLEX_CST:
2353 return (integer_onep (TREE_REALPART (expr))
2354 && integer_zerop (TREE_IMAGPART (expr)));
2355 case VECTOR_CST:
2356 {
2357 unsigned i;
2358 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2359 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2360 return false;
2361 return true;
2362 }
2363 default:
2364 return false;
2365 }
2366 }
2367
2368 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2369 return 1 if every piece is the integer constant one. */
2370
2371 int
2372 integer_each_onep (const_tree expr)
2373 {
2374 if (TREE_CODE (expr) == COMPLEX_CST)
2375 return (integer_onep (TREE_REALPART (expr))
2376 && integer_onep (TREE_IMAGPART (expr)));
2377 else
2378 return integer_onep (expr);
2379 }
2380
2381 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2382 it contains, or a complex or vector whose subparts are such integers. */
2383
2384 int
2385 integer_all_onesp (const_tree expr)
2386 {
2387 if (TREE_CODE (expr) == COMPLEX_CST
2388 && integer_all_onesp (TREE_REALPART (expr))
2389 && integer_all_onesp (TREE_IMAGPART (expr)))
2390 return 1;
2391
2392 else if (TREE_CODE (expr) == VECTOR_CST)
2393 {
2394 unsigned i;
2395 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2396 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2397 return 0;
2398 return 1;
2399 }
2400
2401 else if (TREE_CODE (expr) != INTEGER_CST)
2402 return 0;
2403
2404 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2405 }
2406
2407 /* Return 1 if EXPR is the integer constant minus one. */
2408
2409 int
2410 integer_minus_onep (const_tree expr)
2411 {
2412 if (TREE_CODE (expr) == COMPLEX_CST)
2413 return (integer_all_onesp (TREE_REALPART (expr))
2414 && integer_zerop (TREE_IMAGPART (expr)));
2415 else
2416 return integer_all_onesp (expr);
2417 }
2418
2419 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2420 one bit on). */
2421
2422 int
2423 integer_pow2p (const_tree expr)
2424 {
2425 if (TREE_CODE (expr) == COMPLEX_CST
2426 && integer_pow2p (TREE_REALPART (expr))
2427 && integer_zerop (TREE_IMAGPART (expr)))
2428 return 1;
2429
2430 if (TREE_CODE (expr) != INTEGER_CST)
2431 return 0;
2432
2433 return wi::popcount (expr) == 1;
2434 }
2435
2436 /* Return 1 if EXPR is an integer constant other than zero or a
2437 complex constant other than zero. */
2438
2439 int
2440 integer_nonzerop (const_tree expr)
2441 {
2442 return ((TREE_CODE (expr) == INTEGER_CST
2443 && !wi::eq_p (expr, 0))
2444 || (TREE_CODE (expr) == COMPLEX_CST
2445 && (integer_nonzerop (TREE_REALPART (expr))
2446 || integer_nonzerop (TREE_IMAGPART (expr)))));
2447 }
2448
2449 /* Return 1 if EXPR is the integer constant one. For vectors,
2450 return 1 if every element is the integer constant minus one
2451 (representing the value TRUE). */
2452
2453 int
2454 integer_truep (const_tree expr)
2455 {
2456 if (TREE_CODE (expr) == VECTOR_CST)
2457 return integer_all_onesp (expr);
2458 return integer_onep (expr);
2459 }
2460
2461 /* Return 1 if EXPR is the fixed-point constant zero. */
2462
2463 int
2464 fixed_zerop (const_tree expr)
2465 {
2466 return (TREE_CODE (expr) == FIXED_CST
2467 && TREE_FIXED_CST (expr).data.is_zero ());
2468 }
2469
2470 /* Return the power of two represented by a tree node known to be a
2471 power of two. */
2472
2473 int
2474 tree_log2 (const_tree expr)
2475 {
2476 if (TREE_CODE (expr) == COMPLEX_CST)
2477 return tree_log2 (TREE_REALPART (expr));
2478
2479 return wi::exact_log2 (expr);
2480 }
2481
2482 /* Similar, but return the largest integer Y such that 2 ** Y is less
2483 than or equal to EXPR. */
2484
2485 int
2486 tree_floor_log2 (const_tree expr)
2487 {
2488 if (TREE_CODE (expr) == COMPLEX_CST)
2489 return tree_log2 (TREE_REALPART (expr));
2490
2491 return wi::floor_log2 (expr);
2492 }
2493
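/* Editorial example (not part of GCC): for INTEGER_CST operands,

     tree_log2 (build_int_cst (integer_type_node, 8))        -> 3
     tree_floor_log2 (build_int_cst (integer_type_node, 10)) -> 3

   tree_log2 assumes its argument is an exact power of two, as the
   comment above states; tree_floor_log2 rounds down.  */
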
2494 /* Return number of known trailing zero bits in EXPR, or, if the value of
2495 EXPR is known to be zero, the precision of its type. */
2496
2497 unsigned int
2498 tree_ctz (const_tree expr)
2499 {
2500 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2501 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2502 return 0;
2503
2504 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2505 switch (TREE_CODE (expr))
2506 {
2507 case INTEGER_CST:
2508 ret1 = wi::ctz (expr);
2509 return MIN (ret1, prec);
2510 case SSA_NAME:
2511 ret1 = wi::ctz (get_nonzero_bits (expr));
2512 return MIN (ret1, prec);
2513 case PLUS_EXPR:
2514 case MINUS_EXPR:
2515 case BIT_IOR_EXPR:
2516 case BIT_XOR_EXPR:
2517 case MIN_EXPR:
2518 case MAX_EXPR:
2519 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2520 if (ret1 == 0)
2521 return ret1;
2522 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2523 return MIN (ret1, ret2);
2524 case POINTER_PLUS_EXPR:
2525 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2526 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2527 /* The second operand is sizetype, which could in theory be
2528 wider than the pointer's precision. Make sure we never
2529 return more than prec. */
2530 ret2 = MIN (ret2, prec);
2531 return MIN (ret1, ret2);
2532 case BIT_AND_EXPR:
2533 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2534 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2535 return MAX (ret1, ret2);
2536 case MULT_EXPR:
2537 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2538 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2539 return MIN (ret1 + ret2, prec);
2540 case LSHIFT_EXPR:
2541 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2542 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2543 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2544 {
2545 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2546 return MIN (ret1 + ret2, prec);
2547 }
2548 return ret1;
2549 case RSHIFT_EXPR:
2550 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2551 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2552 {
2553 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2554 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2555 if (ret1 > ret2)
2556 return ret1 - ret2;
2557 }
2558 return 0;
2559 case TRUNC_DIV_EXPR:
2560 case CEIL_DIV_EXPR:
2561 case FLOOR_DIV_EXPR:
2562 case ROUND_DIV_EXPR:
2563 case EXACT_DIV_EXPR:
2564 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2565 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2566 {
2567 int l = tree_log2 (TREE_OPERAND (expr, 1));
2568 if (l >= 0)
2569 {
2570 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2571 ret2 = l;
2572 if (ret1 > ret2)
2573 return ret1 - ret2;
2574 }
2575 }
2576 return 0;
2577 CASE_CONVERT:
2578 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2579 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2580 ret1 = prec;
2581 return MIN (ret1, prec);
2582 case SAVE_EXPR:
2583 return tree_ctz (TREE_OPERAND (expr, 0));
2584 case COND_EXPR:
2585 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2586 if (ret1 == 0)
2587 return 0;
2588 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2589 return MIN (ret1, ret2);
2590 case COMPOUND_EXPR:
2591 return tree_ctz (TREE_OPERAND (expr, 1));
2592 case ADDR_EXPR:
2593 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2594 if (ret1 > BITS_PER_UNIT)
2595 {
2596 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2597 return MIN (ret1, prec);
2598 }
2599 return 0;
2600 default:
2601 return 0;
2602 }
2603 }
2604
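/* Editorial example (not part of GCC): tree_ctz combines the known
   trailing-zero counts of subexpressions, e.g. with

     tree a = build_int_cst (integer_type_node, 24);    -- 0b11000, ctz 3
     tree b = build_int_cst (integer_type_node, 4);     -- 0b00100, ctz 2

   tree_ctz (a) is 3, and for build2 (MULT_EXPR, integer_type_node, a, b)
   the counts add up to 5 (capped at the type precision), matching the
   MULT_EXPR case above.  */
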
2605 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2606 decimal float constants, so don't return 1 for them. */
2607
2608 int
2609 real_zerop (const_tree expr)
2610 {
2611 switch (TREE_CODE (expr))
2612 {
2613 case REAL_CST:
2614 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2616 case COMPLEX_CST:
2617 return real_zerop (TREE_REALPART (expr))
2618 && real_zerop (TREE_IMAGPART (expr));
2619 case VECTOR_CST:
2620 {
2621 unsigned i;
2622 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2623 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2624 return false;
2625 return true;
2626 }
2627 default:
2628 return false;
2629 }
2630 }
2631
2632 /* Return 1 if EXPR is the real constant one in real or complex form.
2633 Trailing zeroes matter for decimal float constants, so don't return
2634 1 for them. */
2635
2636 int
2637 real_onep (const_tree expr)
2638 {
2639 switch (TREE_CODE (expr))
2640 {
2641 case REAL_CST:
2642 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2643 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2644 case COMPLEX_CST:
2645 return real_onep (TREE_REALPART (expr))
2646 && real_zerop (TREE_IMAGPART (expr));
2647 case VECTOR_CST:
2648 {
2649 unsigned i;
2650 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2651 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2652 return false;
2653 return true;
2654 }
2655 default:
2656 return false;
2657 }
2658 }
2659
2660 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2661 matter for decimal float constants, so don't return 1 for them. */
2662
2663 int
2664 real_minus_onep (const_tree expr)
2665 {
2666 switch (TREE_CODE (expr))
2667 {
2668 case REAL_CST:
2669 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2670 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2671 case COMPLEX_CST:
2672 return real_minus_onep (TREE_REALPART (expr))
2673 && real_zerop (TREE_IMAGPART (expr));
2674 case VECTOR_CST:
2675 {
2676 unsigned i;
2677 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2678 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2679 return false;
2680 return true;
2681 }
2682 default:
2683 return false;
2684 }
2685 }
2686
2687 /* Nonzero if EXP is a constant or a cast of a constant. */
2688
2689 int
2690 really_constant_p (const_tree exp)
2691 {
2692 /* This is not quite the same as STRIP_NOPS. It does more. */
2693 while (CONVERT_EXPR_P (exp)
2694 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2695 exp = TREE_OPERAND (exp, 0);
2696 return TREE_CONSTANT (exp);
2697 }
2698 \f
2699 /* Return first list element whose TREE_VALUE is ELEM.
2700 Return 0 if ELEM is not in LIST. */
2701
2702 tree
2703 value_member (tree elem, tree list)
2704 {
2705 while (list)
2706 {
2707 if (elem == TREE_VALUE (list))
2708 return list;
2709 list = TREE_CHAIN (list);
2710 }
2711 return NULL_TREE;
2712 }
2713
2714 /* Return first list element whose TREE_PURPOSE is ELEM.
2715 Return 0 if ELEM is not in LIST. */
2716
2717 tree
2718 purpose_member (const_tree elem, tree list)
2719 {
2720 while (list)
2721 {
2722 if (elem == TREE_PURPOSE (list))
2723 return list;
2724 list = TREE_CHAIN (list);
2725 }
2726 return NULL_TREE;
2727 }
2728
2729 /* Return true if ELEM is in V. */
2730
2731 bool
2732 vec_member (const_tree elem, vec<tree, va_gc> *v)
2733 {
2734 unsigned ix;
2735 tree t;
2736 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2737 if (elem == t)
2738 return true;
2739 return false;
2740 }
2741
2742 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2743 NULL_TREE. */
2744
2745 tree
2746 chain_index (int idx, tree chain)
2747 {
2748 for (; chain && idx > 0; --idx)
2749 chain = TREE_CHAIN (chain);
2750 return chain;
2751 }
2752
2753 /* Return nonzero if ELEM is part of the chain CHAIN. */
2754
2755 int
2756 chain_member (const_tree elem, const_tree chain)
2757 {
2758 while (chain)
2759 {
2760 if (elem == chain)
2761 return 1;
2762 chain = DECL_CHAIN (chain);
2763 }
2764
2765 return 0;
2766 }
2767
2768 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2769 We expect a null pointer to mark the end of the chain.
2770 This is the Lisp primitive `length'. */
2771
2772 int
2773 list_length (const_tree t)
2774 {
2775 const_tree p = t;
2776 #ifdef ENABLE_TREE_CHECKING
2777 const_tree q = t;
2778 #endif
2779 int len = 0;
2780
2781 while (p)
2782 {
2783 p = TREE_CHAIN (p);
2784 #ifdef ENABLE_TREE_CHECKING
2785 if (len % 2)
2786 q = TREE_CHAIN (q);
2787 gcc_assert (p != q);
2788 #endif
2789 len++;
2790 }
2791
2792 return len;
2793 }
2794
2795 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2796 UNION_TYPE TYPE, or NULL_TREE if none. */
2797
2798 tree
2799 first_field (const_tree type)
2800 {
2801 tree t = TYPE_FIELDS (type);
2802 while (t && TREE_CODE (t) != FIELD_DECL)
2803 t = TREE_CHAIN (t);
2804 return t;
2805 }
2806
2807 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2808 by modifying the last node in chain 1 to point to chain 2.
2809 This is the Lisp primitive `nconc'. */
2810
2811 tree
2812 chainon (tree op1, tree op2)
2813 {
2814 tree t1;
2815
2816 if (!op1)
2817 return op2;
2818 if (!op2)
2819 return op1;
2820
2821 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2822 continue;
2823 TREE_CHAIN (t1) = op2;
2824
2825 #ifdef ENABLE_TREE_CHECKING
2826 {
2827 tree t2;
2828 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2829 gcc_assert (t2 != t1);
2830 }
2831 #endif
2832
2833 return op1;
2834 }
2835
2836 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2837
2838 tree
2839 tree_last (tree chain)
2840 {
2841 tree next;
2842 if (chain)
2843 while ((next = TREE_CHAIN (chain)))
2844 chain = next;
2845 return chain;
2846 }
2847
2848 /* Reverse the order of elements in the chain T,
2849 and return the new head of the chain (old last element). */
2850
2851 tree
2852 nreverse (tree t)
2853 {
2854 tree prev = 0, decl, next;
2855 for (decl = t; decl; decl = next)
2856 {
2857 /* We shouldn't be using this function to reverse BLOCK chains; we
2858 have blocks_nreverse for that. */
2859 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2860 next = TREE_CHAIN (decl);
2861 TREE_CHAIN (decl) = prev;
2862 prev = decl;
2863 }
2864 return prev;
2865 }
2866 \f
2867 /* Return a newly created TREE_LIST node whose
2868 purpose and value fields are PARM and VALUE. */
2869
2870 tree
2871 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2872 {
2873 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2874 TREE_PURPOSE (t) = parm;
2875 TREE_VALUE (t) = value;
2876 return t;
2877 }
2878
2879 /* Build a chain of TREE_LIST nodes from a vector. */
2880
2881 tree
2882 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2883 {
2884 tree ret = NULL_TREE;
2885 tree *pp = &ret;
2886 unsigned int i;
2887 tree t;
2888 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2889 {
2890 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2891 pp = &TREE_CHAIN (*pp);
2892 }
2893 return ret;
2894 }
2895
2896 /* Return a newly created TREE_LIST node whose
2897 purpose and value fields are PURPOSE and VALUE
2898 and whose TREE_CHAIN is CHAIN. */
2899
2900 tree
2901 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2902 {
2903 tree node;
2904
2905 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2906 memset (node, 0, sizeof (struct tree_common));
2907
2908 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2909
2910 TREE_SET_CODE (node, TREE_LIST);
2911 TREE_CHAIN (node) = chain;
2912 TREE_PURPOSE (node) = purpose;
2913 TREE_VALUE (node) = value;
2914 return node;
2915 }
2916
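/* Editorial example (not part of GCC): the TREE_LIST primitives above
   compose in the usual Lisp style, e.g.

     tree chain = NULL_TREE;
     chain = tree_cons (NULL_TREE, integer_zero_node, chain);
     chain = tree_cons (NULL_TREE, integer_one_node, chain);

   Now list_length (chain) is 2 and TREE_VALUE (chain) is
   integer_one_node; nreverse (chain) reverses the links in place and
   returns the new head, whose TREE_VALUE is integer_zero_node.  */
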
2917 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2918 trees. */
2919
2920 vec<tree, va_gc> *
2921 ctor_to_vec (tree ctor)
2922 {
2923 vec<tree, va_gc> *vec;
2924 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2925 unsigned int ix;
2926 tree val;
2927
2928 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2929 vec->quick_push (val);
2930
2931 return vec;
2932 }
2933 \f
2934 /* Return the size nominally occupied by an object of type TYPE
2935 when it resides in memory. The value is measured in units of bytes,
2936 and its data type is that normally used for type sizes
2937 (which is the first type created by make_signed_type or
2938 make_unsigned_type). */
2939
2940 tree
2941 size_in_bytes_loc (location_t loc, const_tree type)
2942 {
2943 tree t;
2944
2945 if (type == error_mark_node)
2946 return integer_zero_node;
2947
2948 type = TYPE_MAIN_VARIANT (type);
2949 t = TYPE_SIZE_UNIT (type);
2950
2951 if (t == 0)
2952 {
2953 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2954 return size_zero_node;
2955 }
2956
2957 return t;
2958 }
2959
2960 /* Return the size of TYPE (in bytes) as a wide integer
2961 or return -1 if the size can vary or is larger than an integer. */
2962
2963 HOST_WIDE_INT
2964 int_size_in_bytes (const_tree type)
2965 {
2966 tree t;
2967
2968 if (type == error_mark_node)
2969 return 0;
2970
2971 type = TYPE_MAIN_VARIANT (type);
2972 t = TYPE_SIZE_UNIT (type);
2973
2974 if (t && tree_fits_uhwi_p (t))
2975 return TREE_INT_CST_LOW (t);
2976 else
2977 return -1;
2978 }
2979
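/* Editorial example (not part of GCC): on a typical target with 32-bit int,

     int_size_in_bytes (integer_type_node)   -> 4
     int_size_in_bytes (void_type_node)      -> -1

   since void has no TYPE_SIZE_UNIT; variable-length array types
   likewise yield -1 because their size is not a constant.  */
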
2980 /* Return the maximum size of TYPE (in bytes) as a wide integer
2981 or return -1 if the size can vary or is larger than an integer. */
2982
2983 HOST_WIDE_INT
2984 max_int_size_in_bytes (const_tree type)
2985 {
2986 HOST_WIDE_INT size = -1;
2987 tree size_tree;
2988
2989 /* If this is an array type, check for a possible MAX_SIZE attached. */
2990
2991 if (TREE_CODE (type) == ARRAY_TYPE)
2992 {
2993 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2994
2995 if (size_tree && tree_fits_uhwi_p (size_tree))
2996 size = tree_to_uhwi (size_tree);
2997 }
2998
2999 /* If we still haven't been able to get a size, see if the language
3000 can compute a maximum size. */
3001
3002 if (size == -1)
3003 {
3004 size_tree = lang_hooks.types.max_size (type);
3005
3006 if (size_tree && tree_fits_uhwi_p (size_tree))
3007 size = tree_to_uhwi (size_tree);
3008 }
3009
3010 return size;
3011 }
3012 \f
3013 /* Return the bit position of FIELD, in bits from the start of the record.
3014 This is a tree of type bitsizetype. */
3015
3016 tree
3017 bit_position (const_tree field)
3018 {
3019 return bit_from_pos (DECL_FIELD_OFFSET (field),
3020 DECL_FIELD_BIT_OFFSET (field));
3021 }
3022 \f
3023 /* Return the byte position of FIELD, in bytes from the start of the record.
3024 This is a tree of type sizetype. */
3025
3026 tree
3027 byte_position (const_tree field)
3028 {
3029 return byte_from_pos (DECL_FIELD_OFFSET (field),
3030 DECL_FIELD_BIT_OFFSET (field));
3031 }
3032
3033 /* Likewise, but return as an integer. It must be representable in
3034 that way (since it could be a signed value, we don't have the
3035 option of returning -1 like int_size_in_bytes can). */
3036
3037 HOST_WIDE_INT
3038 int_byte_position (const_tree field)
3039 {
3040 return tree_to_shwi (byte_position (field));
3041 }
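
/* Editorial example (not part of GCC): the field-position accessors are
   normally used while walking the FIELD_DECLs of a record, e.g.

     tree field;
     for (field = TYPE_FIELDS (record_type); field;
          field = DECL_CHAIN (field))
       if (TREE_CODE (field) == FIELD_DECL)
         {
           HOST_WIDE_INT off = int_byte_position (field);
           ...
         }

   where record_type is assumed to be a laid-out RECORD_TYPE.  */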
3042 \f
3043 /* Return the strictest alignment, in bits, that T is known to have. */
3044
3045 unsigned int
3046 expr_align (const_tree t)
3047 {
3048 unsigned int align0, align1;
3049
3050 switch (TREE_CODE (t))
3051 {
3052 CASE_CONVERT: case NON_LVALUE_EXPR:
3053 /* If we have conversions, we know that the alignment of the
3054 object must meet each of the alignments of the types. */
3055 align0 = expr_align (TREE_OPERAND (t, 0));
3056 align1 = TYPE_ALIGN (TREE_TYPE (t));
3057 return MAX (align0, align1);
3058
3059 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3060 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3061 case CLEANUP_POINT_EXPR:
3062 /* These don't change the alignment of an object. */
3063 return expr_align (TREE_OPERAND (t, 0));
3064
3065 case COND_EXPR:
3066 /* The best we can do is say that the alignment is the least aligned
3067 of the two arms. */
3068 align0 = expr_align (TREE_OPERAND (t, 1));
3069 align1 = expr_align (TREE_OPERAND (t, 2));
3070 return MIN (align0, align1);
3071
3072 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3073 meaningfully; it is always 1. */
3074 case LABEL_DECL: case CONST_DECL:
3075 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3076 case FUNCTION_DECL:
3077 gcc_assert (DECL_ALIGN (t) != 0);
3078 return DECL_ALIGN (t);
3079
3080 default:
3081 break;
3082 }
3083
3084 /* Otherwise take the alignment from that of the type. */
3085 return TYPE_ALIGN (TREE_TYPE (t));
3086 }
3087 \f
3088 /* Return, as a tree node, the number of elements for TYPE (which is an
3089 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3090
3091 tree
3092 array_type_nelts (const_tree type)
3093 {
3094 tree index_type, min, max;
3095
3096 /* If they did it with unspecified bounds, then we should have already
3097 given an error about it before we got here. */
3098 if (! TYPE_DOMAIN (type))
3099 return error_mark_node;
3100
3101 index_type = TYPE_DOMAIN (type);
3102 min = TYPE_MIN_VALUE (index_type);
3103 max = TYPE_MAX_VALUE (index_type);
3104
3105 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3106 if (!max)
3107 return error_mark_node;
3108
3109 return (integer_zerop (min)
3110 ? max
3111 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3112 }
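
/* Editorial example (not part of GCC): for a ten-element array built as

     tree idx = build_index_type (size_int (9));          -- domain 0..9
     tree arr = build_array_type (integer_type_node, idx);

   array_type_nelts (arr) is the INTEGER_CST 9, i.e. the number of
   elements minus one, as documented above.  */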
3113 \f
3114 /* If arg is static -- a reference to an object in static storage -- then
3115 return the object. This is not the same as the C meaning of `static'.
3116 If arg isn't static, return NULL. */
3117
3118 tree
3119 staticp (tree arg)
3120 {
3121 switch (TREE_CODE (arg))
3122 {
3123 case FUNCTION_DECL:
3124 /* Nested functions are static, even though taking their address will
3125 involve a trampoline as we unnest the nested function and create
3126 the trampoline on the tree level. */
3127 return arg;
3128
3129 case VAR_DECL:
3130 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3131 && ! DECL_THREAD_LOCAL_P (arg)
3132 && ! DECL_DLLIMPORT_P (arg)
3133 ? arg : NULL);
3134
3135 case CONST_DECL:
3136 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3137 ? arg : NULL);
3138
3139 case CONSTRUCTOR:
3140 return TREE_STATIC (arg) ? arg : NULL;
3141
3142 case LABEL_DECL:
3143 case STRING_CST:
3144 return arg;
3145
3146 case COMPONENT_REF:
3147 /* If the thing being referenced is not a field, then it is
3148 something language specific. */
3149 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3150
3151 /* If we are referencing a bitfield, we can't evaluate an
3152 ADDR_EXPR at compile time and so it isn't a constant. */
3153 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3154 return NULL;
3155
3156 return staticp (TREE_OPERAND (arg, 0));
3157
3158 case BIT_FIELD_REF:
3159 return NULL;
3160
3161 case INDIRECT_REF:
3162 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3163
3164 case ARRAY_REF:
3165 case ARRAY_RANGE_REF:
3166 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3167 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3168 return staticp (TREE_OPERAND (arg, 0));
3169 else
3170 return NULL;
3171
3172 case COMPOUND_LITERAL_EXPR:
3173 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3174
3175 default:
3176 return NULL;
3177 }
3178 }
3179
3180 \f
3181
3182
3183 /* Return whether OP is a DECL whose address is function-invariant. */
3184
3185 bool
3186 decl_address_invariant_p (const_tree op)
3187 {
3188 /* The conditions below are slightly less strict than the one in
3189 staticp. */
3190
3191 switch (TREE_CODE (op))
3192 {
3193 case PARM_DECL:
3194 case RESULT_DECL:
3195 case LABEL_DECL:
3196 case FUNCTION_DECL:
3197 return true;
3198
3199 case VAR_DECL:
3200 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3201 || DECL_THREAD_LOCAL_P (op)
3202 || DECL_CONTEXT (op) == current_function_decl
3203 || decl_function_context (op) == current_function_decl)
3204 return true;
3205 break;
3206
3207 case CONST_DECL:
3208 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3209 || decl_function_context (op) == current_function_decl)
3210 return true;
3211 break;
3212
3213 default:
3214 break;
3215 }
3216
3217 return false;
3218 }
3219
3220 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3221
3222 bool
3223 decl_address_ip_invariant_p (const_tree op)
3224 {
3225 /* The conditions below are slightly less strict than the one in
3226 staticp. */
3227
3228 switch (TREE_CODE (op))
3229 {
3230 case LABEL_DECL:
3231 case FUNCTION_DECL:
3232 case STRING_CST:
3233 return true;
3234
3235 case VAR_DECL:
3236 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3237 && !DECL_DLLIMPORT_P (op))
3238 || DECL_THREAD_LOCAL_P (op))
3239 return true;
3240 break;
3241
3242 case CONST_DECL:
3243 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3244 return true;
3245 break;
3246
3247 default:
3248 break;
3249 }
3250
3251 return false;
3252 }
3253
3254
3255 /* Return true if T is function-invariant (internal function, does
3256 not handle arithmetic; that's handled in skip_simple_arithmetic and
3257 tree_invariant_p). */
3258
3259 static bool
3260 tree_invariant_p_1 (tree t)
3261 {
3262 tree op;
3263
3264 if (TREE_CONSTANT (t)
3265 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3266 return true;
3267
3268 switch (TREE_CODE (t))
3269 {
3270 case SAVE_EXPR:
3271 return true;
3272
3273 case ADDR_EXPR:
3274 op = TREE_OPERAND (t, 0);
3275 while (handled_component_p (op))
3276 {
3277 switch (TREE_CODE (op))
3278 {
3279 case ARRAY_REF:
3280 case ARRAY_RANGE_REF:
3281 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3282 || TREE_OPERAND (op, 2) != NULL_TREE
3283 || TREE_OPERAND (op, 3) != NULL_TREE)
3284 return false;
3285 break;
3286
3287 case COMPONENT_REF:
3288 if (TREE_OPERAND (op, 2) != NULL_TREE)
3289 return false;
3290 break;
3291
3292 default:;
3293 }
3294 op = TREE_OPERAND (op, 0);
3295 }
3296
3297 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3298
3299 default:
3300 break;
3301 }
3302
3303 return false;
3304 }
3305
3306 /* Return true if T is function-invariant. */
3307
3308 bool
3309 tree_invariant_p (tree t)
3310 {
3311 tree inner = skip_simple_arithmetic (t);
3312 return tree_invariant_p_1 (inner);
3313 }
3314
3315 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3316 Do this to any expression which may be used in more than one place,
3317 but must be evaluated only once.
3318
3319 Normally, expand_expr would reevaluate the expression each time.
3320 Calling save_expr produces something that is evaluated and recorded
3321 the first time expand_expr is called on it. Subsequent calls to
3322 expand_expr just reuse the recorded value.
3323
3324 The call to expand_expr that generates code that actually computes
3325 the value is the first call *at compile time*. Subsequent calls
3326 *at compile time* generate code to use the saved value.
3327 This produces the correct result provided that *at run time* control
3328 always flows through the insns made by the first expand_expr
3329 before reaching the other places where the save_expr was evaluated.
3330 You, the caller of save_expr, must make sure this is so.
3331
3332 Constants, and certain read-only nodes, are returned with no
3333 SAVE_EXPR because that is safe. Expressions containing placeholders
3334 are not touched; see tree.def for an explanation of what these
3335 are used for. */
3336
3337 tree
3338 save_expr (tree expr)
3339 {
3340 tree t = fold (expr);
3341 tree inner;
3342
3343 /* If the tree evaluates to a constant, then we don't want to hide that
3344 fact (i.e. this allows further folding, and direct checks for constants).
3345 However, a read-only object that has side effects cannot be bypassed.
3346 Since it is no problem to reevaluate literals, we just return the
3347 literal node. */
3348 inner = skip_simple_arithmetic (t);
3349 if (TREE_CODE (inner) == ERROR_MARK)
3350 return inner;
3351
3352 if (tree_invariant_p_1 (inner))
3353 return t;
3354
3355 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3356 it means that the size or offset of some field of an object depends on
3357 the value within another field.
3358
3359 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3360 and some variable since it would then need to be both evaluated once and
3361 evaluated more than once. Front-ends must assure this case cannot
3362 happen by surrounding any such subexpressions in their own SAVE_EXPR
3363 and forcing evaluation at the proper time. */
3364 if (contains_placeholder_p (inner))
3365 return t;
3366
3367 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3368 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3369
3370 /* This expression might be placed ahead of a jump to ensure that the
3371 value was computed on both sides of the jump. So make sure it isn't
3372 eliminated as dead. */
3373 TREE_SIDE_EFFECTS (t) = 1;
3374 return t;
3375 }
3376
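/* Editorial example (not part of GCC): a caller that needs EXPR twice
   wraps it once and reuses the result, e.g.

     tree once = save_expr (expr);
     tree sum  = fold_build2 (PLUS_EXPR, TREE_TYPE (once), once, once);

   so that EXPR's side effects are evaluated only once at run time even
   though the value is used in both operands.  */
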
3377 /* Look inside EXPR into any simple arithmetic operations. Return the
3378 outermost non-arithmetic or non-invariant node. */
3379
3380 tree
3381 skip_simple_arithmetic (tree expr)
3382 {
3383 /* We don't care about whether this can be used as an lvalue in this
3384 context. */
3385 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3386 expr = TREE_OPERAND (expr, 0);
3387
3388 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3389 a constant, it will be more efficient to not make another SAVE_EXPR since
3390 it will allow better simplification and GCSE will be able to merge the
3391 computations if they actually occur. */
3392 while (true)
3393 {
3394 if (UNARY_CLASS_P (expr))
3395 expr = TREE_OPERAND (expr, 0);
3396 else if (BINARY_CLASS_P (expr))
3397 {
3398 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3399 expr = TREE_OPERAND (expr, 0);
3400 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3401 expr = TREE_OPERAND (expr, 1);
3402 else
3403 break;
3404 }
3405 else
3406 break;
3407 }
3408
3409 return expr;
3410 }
3411
3412 /* Look inside EXPR into simple arithmetic operations involving constants.
3413 Return the outermost non-arithmetic or non-constant node. */
3414
3415 tree
3416 skip_simple_constant_arithmetic (tree expr)
3417 {
3418 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3419 expr = TREE_OPERAND (expr, 0);
3420
3421 while (true)
3422 {
3423 if (UNARY_CLASS_P (expr))
3424 expr = TREE_OPERAND (expr, 0);
3425 else if (BINARY_CLASS_P (expr))
3426 {
3427 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3428 expr = TREE_OPERAND (expr, 0);
3429 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3430 expr = TREE_OPERAND (expr, 1);
3431 else
3432 break;
3433 }
3434 else
3435 break;
3436 }
3437
3438 return expr;
3439 }
3440
3441 /* Return which tree structure is used by T. */
3442
3443 enum tree_node_structure_enum
3444 tree_node_structure (const_tree t)
3445 {
3446 const enum tree_code code = TREE_CODE (t);
3447 return tree_node_structure_for_code (code);
3448 }
3449
3450 /* Set various status flags when building a CALL_EXPR object T. */
3451
3452 static void
3453 process_call_operands (tree t)
3454 {
3455 bool side_effects = TREE_SIDE_EFFECTS (t);
3456 bool read_only = false;
3457 int i = call_expr_flags (t);
3458
3459 /* Calls have side-effects, except those to const or pure functions. */
3460 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3461 side_effects = true;
3462 /* Propagate TREE_READONLY of arguments for const functions. */
3463 if (i & ECF_CONST)
3464 read_only = true;
3465
3466 if (!side_effects || read_only)
3467 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3468 {
3469 tree op = TREE_OPERAND (t, i);
3470 if (op && TREE_SIDE_EFFECTS (op))
3471 side_effects = true;
3472 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3473 read_only = false;
3474 }
3475
3476 TREE_SIDE_EFFECTS (t) = side_effects;
3477 TREE_READONLY (t) = read_only;
3478 }
3479 \f
3480 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3481 size or offset that depends on a field within a record. */
3482
3483 bool
3484 contains_placeholder_p (const_tree exp)
3485 {
3486 enum tree_code code;
3487
3488 if (!exp)
3489 return 0;
3490
3491 code = TREE_CODE (exp);
3492 if (code == PLACEHOLDER_EXPR)
3493 return 1;
3494
3495 switch (TREE_CODE_CLASS (code))
3496 {
3497 case tcc_reference:
3498 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3499 position computations since they will be converted into a
3500 WITH_RECORD_EXPR involving the reference, which we assume
3501 here will be valid. */
3502 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3503
3504 case tcc_exceptional:
3505 if (code == TREE_LIST)
3506 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3507 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3508 break;
3509
3510 case tcc_unary:
3511 case tcc_binary:
3512 case tcc_comparison:
3513 case tcc_expression:
3514 switch (code)
3515 {
3516 case COMPOUND_EXPR:
3517 /* Ignoring the first operand isn't quite right, but works best. */
3518 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3519
3520 case COND_EXPR:
3521 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3522 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3523 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3524
3525 case SAVE_EXPR:
3526 /* The save_expr function never wraps anything containing
3527 a PLACEHOLDER_EXPR. */
3528 return 0;
3529
3530 default:
3531 break;
3532 }
3533
3534 switch (TREE_CODE_LENGTH (code))
3535 {
3536 case 1:
3537 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3538 case 2:
3539 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3540 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3541 default:
3542 return 0;
3543 }
3544
3545 case tcc_vl_exp:
3546 switch (code)
3547 {
3548 case CALL_EXPR:
3549 {
3550 const_tree arg;
3551 const_call_expr_arg_iterator iter;
3552 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3553 if (CONTAINS_PLACEHOLDER_P (arg))
3554 return 1;
3555 return 0;
3556 }
3557 default:
3558 return 0;
3559 }
3560
3561 default:
3562 return 0;
3563 }
3564 return 0;
3565 }
3566
3567 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3568 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3569 field positions. */
3570
3571 static bool
3572 type_contains_placeholder_1 (const_tree type)
3573 {
3574 /* If the size contains a placeholder or the parent type (component type in
3575 the case of arrays) type involves a placeholder, this type does. */
3576 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3577 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3578 || (!POINTER_TYPE_P (type)
3579 && TREE_TYPE (type)
3580 && type_contains_placeholder_p (TREE_TYPE (type))))
3581 return true;
3582
3583 /* Now do type-specific checks. Note that the last part of the check above
3584 greatly limits what we have to do below. */
3585 switch (TREE_CODE (type))
3586 {
3587 case VOID_TYPE:
3588 case POINTER_BOUNDS_TYPE:
3589 case COMPLEX_TYPE:
3590 case ENUMERAL_TYPE:
3591 case BOOLEAN_TYPE:
3592 case POINTER_TYPE:
3593 case OFFSET_TYPE:
3594 case REFERENCE_TYPE:
3595 case METHOD_TYPE:
3596 case FUNCTION_TYPE:
3597 case VECTOR_TYPE:
3598 case NULLPTR_TYPE:
3599 return false;
3600
3601 case INTEGER_TYPE:
3602 case REAL_TYPE:
3603 case FIXED_POINT_TYPE:
3604 /* Here we just check the bounds. */
3605 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3606 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3607
3608 case ARRAY_TYPE:
3609 /* We have already checked the component type above, so just check
3610 the domain type. Flexible array members have a null domain. */
3611 return TYPE_DOMAIN (type) ?
3612 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3613
3614 case RECORD_TYPE:
3615 case UNION_TYPE:
3616 case QUAL_UNION_TYPE:
3617 {
3618 tree field;
3619
3620 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3621 if (TREE_CODE (field) == FIELD_DECL
3622 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3623 || (TREE_CODE (type) == QUAL_UNION_TYPE
3624 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3625 || type_contains_placeholder_p (TREE_TYPE (field))))
3626 return true;
3627
3628 return false;
3629 }
3630
3631 default:
3632 gcc_unreachable ();
3633 }
3634 }
3635
3636 /* Wrapper around above function used to cache its result. */
3637
3638 bool
3639 type_contains_placeholder_p (tree type)
3640 {
3641 bool result;
3642
3643 /* If the contains_placeholder_bits field has been initialized,
3644 then we know the answer. */
3645 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3646 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3647
3648 /* Indicate that we've seen this type node, and the answer is false.
3649 This is what we want to return if we run into recursion via fields. */
3650 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3651
3652 /* Compute the real value. */
3653 result = type_contains_placeholder_1 (type);
3654
3655 /* Store the real value. */
3656 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3657
3658 return result;
3659 }
3660 \f
3661 /* Push tree EXP onto vector QUEUE if it is not already present. */
3662
3663 static void
3664 push_without_duplicates (tree exp, vec<tree> *queue)
3665 {
3666 unsigned int i;
3667 tree iter;
3668
3669 FOR_EACH_VEC_ELT (*queue, i, iter)
3670 if (simple_cst_equal (iter, exp) == 1)
3671 break;
3672
3673 if (!iter)
3674 queue->safe_push (exp);
3675 }
3676
3677 /* Given a tree EXP, find all occurrences of references to fields
3678 in a PLACEHOLDER_EXPR and place them in vector REFS without
3679 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3680 we assume here that EXP contains only arithmetic expressions
3681 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3682 argument list. */
3683
3684 void
3685 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3686 {
3687 enum tree_code code = TREE_CODE (exp);
3688 tree inner;
3689 int i;
3690
3691 /* We handle TREE_LIST and COMPONENT_REF separately. */
3692 if (code == TREE_LIST)
3693 {
3694 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3695 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3696 }
3697 else if (code == COMPONENT_REF)
3698 {
3699 for (inner = TREE_OPERAND (exp, 0);
3700 REFERENCE_CLASS_P (inner);
3701 inner = TREE_OPERAND (inner, 0))
3702 ;
3703
3704 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3705 push_without_duplicates (exp, refs);
3706 else
3707 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3708 }
3709 else
3710 switch (TREE_CODE_CLASS (code))
3711 {
3712 case tcc_constant:
3713 break;
3714
3715 case tcc_declaration:
3716 /* Variables allocated to static storage can stay. */
3717 if (!TREE_STATIC (exp))
3718 push_without_duplicates (exp, refs);
3719 break;
3720
3721 case tcc_expression:
3722 /* This is the pattern built in ada/make_aligning_type. */
3723 if (code == ADDR_EXPR
3724 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3725 {
3726 push_without_duplicates (exp, refs);
3727 break;
3728 }
3729
3730 /* Fall through... */
3731
3732 case tcc_exceptional:
3733 case tcc_unary:
3734 case tcc_binary:
3735 case tcc_comparison:
3736 case tcc_reference:
3737 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3738 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3739 break;
3740
3741 case tcc_vl_exp:
3742 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3743 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3744 break;
3745
3746 default:
3747 gcc_unreachable ();
3748 }
3749 }
3750
3751 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3752 return a tree with all occurrences of references to F in a
3753 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3754 CONST_DECLs. Note that we assume here that EXP contains only
3755 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3756 occurring only in their argument list. */
3757
3758 tree
3759 substitute_in_expr (tree exp, tree f, tree r)
3760 {
3761 enum tree_code code = TREE_CODE (exp);
3762 tree op0, op1, op2, op3;
3763 tree new_tree;
3764
3765 /* We handle TREE_LIST and COMPONENT_REF separately. */
3766 if (code == TREE_LIST)
3767 {
3768 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3769 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3770 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3771 return exp;
3772
3773 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3774 }
3775 else if (code == COMPONENT_REF)
3776 {
3777 tree inner;
3778
3779 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3780 and it is the right field, replace it with R. */
3781 for (inner = TREE_OPERAND (exp, 0);
3782 REFERENCE_CLASS_P (inner);
3783 inner = TREE_OPERAND (inner, 0))
3784 ;
3785
3786 /* The field. */
3787 op1 = TREE_OPERAND (exp, 1);
3788
3789 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3790 return r;
3791
3792 /* If this expression hasn't been completed yet, leave it alone. */
3793 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3794 return exp;
3795
3796 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3797 if (op0 == TREE_OPERAND (exp, 0))
3798 return exp;
3799
3800 new_tree
3801 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3802 }
3803 else
3804 switch (TREE_CODE_CLASS (code))
3805 {
3806 case tcc_constant:
3807 return exp;
3808
3809 case tcc_declaration:
3810 if (exp == f)
3811 return r;
3812 else
3813 return exp;
3814
3815 case tcc_expression:
3816 if (exp == f)
3817 return r;
3818
3819 /* Fall through... */
3820
3821 case tcc_exceptional:
3822 case tcc_unary:
3823 case tcc_binary:
3824 case tcc_comparison:
3825 case tcc_reference:
3826 switch (TREE_CODE_LENGTH (code))
3827 {
3828 case 0:
3829 return exp;
3830
3831 case 1:
3832 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3833 if (op0 == TREE_OPERAND (exp, 0))
3834 return exp;
3835
3836 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3837 break;
3838
3839 case 2:
3840 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3841 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3842
3843 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3844 return exp;
3845
3846 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3847 break;
3848
3849 case 3:
3850 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3851 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3852 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3853
3854 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3855 && op2 == TREE_OPERAND (exp, 2))
3856 return exp;
3857
3858 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3859 break;
3860
3861 case 4:
3862 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3863 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3864 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3865 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3866
3867 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3868 && op2 == TREE_OPERAND (exp, 2)
3869 && op3 == TREE_OPERAND (exp, 3))
3870 return exp;
3871
3872 new_tree
3873 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3874 break;
3875
3876 default:
3877 gcc_unreachable ();
3878 }
3879 break;
3880
3881 case tcc_vl_exp:
3882 {
3883 int i;
3884
3885 new_tree = NULL_TREE;
3886
3887 /* If we are trying to replace F with a constant, inline back
3888 functions which do nothing else than computing a value from
3889 the arguments they are passed. This makes it possible to
3890 fold partially or entirely the replacement expression. */
3891 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3892 {
3893 tree t = maybe_inline_call_in_expr (exp);
3894 if (t)
3895 return SUBSTITUTE_IN_EXPR (t, f, r);
3896 }
3897
3898 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3899 {
3900 tree op = TREE_OPERAND (exp, i);
3901 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3902 if (new_op != op)
3903 {
3904 if (!new_tree)
3905 new_tree = copy_node (exp);
3906 TREE_OPERAND (new_tree, i) = new_op;
3907 }
3908 }
3909
3910 if (new_tree)
3911 {
3912 new_tree = fold (new_tree);
3913 if (TREE_CODE (new_tree) == CALL_EXPR)
3914 process_call_operands (new_tree);
3915 }
3916 else
3917 return exp;
3918 }
3919 break;
3920
3921 default:
3922 gcc_unreachable ();
3923 }
3924
3925 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3926
3927 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3928 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3929
3930 return new_tree;
3931 }
3932
3933 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3934 for it within OBJ, a tree that is an object or a chain of references. */
3935
3936 tree
3937 substitute_placeholder_in_expr (tree exp, tree obj)
3938 {
3939 enum tree_code code = TREE_CODE (exp);
3940 tree op0, op1, op2, op3;
3941 tree new_tree;
3942
3943 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3944 in the chain of OBJ. */
3945 if (code == PLACEHOLDER_EXPR)
3946 {
3947 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3948 tree elt;
3949
3950 for (elt = obj; elt != 0;
3951 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3952 || TREE_CODE (elt) == COND_EXPR)
3953 ? TREE_OPERAND (elt, 1)
3954 : (REFERENCE_CLASS_P (elt)
3955 || UNARY_CLASS_P (elt)
3956 || BINARY_CLASS_P (elt)
3957 || VL_EXP_CLASS_P (elt)
3958 || EXPRESSION_CLASS_P (elt))
3959 ? TREE_OPERAND (elt, 0) : 0))
3960 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3961 return elt;
3962
3963 for (elt = obj; elt != 0;
3964 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3965 || TREE_CODE (elt) == COND_EXPR)
3966 ? TREE_OPERAND (elt, 1)
3967 : (REFERENCE_CLASS_P (elt)
3968 || UNARY_CLASS_P (elt)
3969 || BINARY_CLASS_P (elt)
3970 || VL_EXP_CLASS_P (elt)
3971 || EXPRESSION_CLASS_P (elt))
3972 ? TREE_OPERAND (elt, 0) : 0))
3973 if (POINTER_TYPE_P (TREE_TYPE (elt))
3974 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3975 == need_type))
3976 return fold_build1 (INDIRECT_REF, need_type, elt);
3977
3978 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3979 survives until RTL generation, there will be an error. */
3980 return exp;
3981 }
3982
3983 /* TREE_LIST is special because we need to look at TREE_VALUE
3984 and TREE_CHAIN, not TREE_OPERANDS. */
3985 else if (code == TREE_LIST)
3986 {
3987 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3988 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3989 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3990 return exp;
3991
3992 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3993 }
3994 else
3995 switch (TREE_CODE_CLASS (code))
3996 {
3997 case tcc_constant:
3998 case tcc_declaration:
3999 return exp;
4000
4001 case tcc_exceptional:
4002 case tcc_unary:
4003 case tcc_binary:
4004 case tcc_comparison:
4005 case tcc_expression:
4006 case tcc_reference:
4007 case tcc_statement:
4008 switch (TREE_CODE_LENGTH (code))
4009 {
4010 case 0:
4011 return exp;
4012
4013 case 1:
4014 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4015 if (op0 == TREE_OPERAND (exp, 0))
4016 return exp;
4017
4018 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4019 break;
4020
4021 case 2:
4022 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4023 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4024
4025 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4026 return exp;
4027
4028 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4029 break;
4030
4031 case 3:
4032 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4033 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4034 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4035
4036 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4037 && op2 == TREE_OPERAND (exp, 2))
4038 return exp;
4039
4040 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4041 break;
4042
4043 case 4:
4044 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4045 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4046 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4047 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4048
4049 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4050 && op2 == TREE_OPERAND (exp, 2)
4051 && op3 == TREE_OPERAND (exp, 3))
4052 return exp;
4053
4054 new_tree
4055 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4056 break;
4057
4058 default:
4059 gcc_unreachable ();
4060 }
4061 break;
4062
4063 case tcc_vl_exp:
4064 {
4065 int i;
4066
4067 new_tree = NULL_TREE;
4068
4069 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4070 {
4071 tree op = TREE_OPERAND (exp, i);
4072 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4073 if (new_op != op)
4074 {
4075 if (!new_tree)
4076 new_tree = copy_node (exp);
4077 TREE_OPERAND (new_tree, i) = new_op;
4078 }
4079 }
4080
4081 if (new_tree)
4082 {
4083 new_tree = fold (new_tree);
4084 if (TREE_CODE (new_tree) == CALL_EXPR)
4085 process_call_operands (new_tree);
4086 }
4087 else
4088 return exp;
4089 }
4090 break;
4091
4092 default:
4093 gcc_unreachable ();
4094 }
4095
4096 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4097
4098 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4099 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4100
4101 return new_tree;
4102 }
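/* Editorial illustration (not part of GCC): the usual way to invoke the
   substitution above is through the SUBSTITUTE_PLACEHOLDER_IN_EXPR macro
   from tree.h, e.g. to rewrite a self-referential field size in terms of
   a concrete object.  The helper name below is hypothetical.  */
#if 0
static tree
field_size_for_object (tree field, tree obj)
{
  /* DECL_SIZE may contain a PLACEHOLDER_EXPR standing for "the record
     containing this field"; substitute OBJ (or a suitable subobject of
     it) for that placeholder.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_SIZE (field), obj);
}
#endif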
4103 \f
4104
4105 /* Subroutine of stabilize_reference; this is called for subtrees of
4106 references. Any expression with side-effects must be put in a SAVE_EXPR
4107 to ensure that it is only evaluated once.
4108
4109 We don't put SAVE_EXPR nodes around everything, because assigning very
4110 simple expressions to temporaries causes us to miss good opportunities
4111 for optimization. Among other things, the opportunity to fold the
4112 addition of a constant into an addressing mode often gets lost, e.g.
4113 "y[i+1] += x;". In general, we take the approach that we should not make
4114 an assignment unless we are forced into it - i.e., any operator without
4115 side effects should be allowed, and cse should take care of coalescing
4116 multiple occurrences of the same expression should that prove fruitful. */
4117
4118 static tree
4119 stabilize_reference_1 (tree e)
4120 {
4121 tree result;
4122 enum tree_code code = TREE_CODE (e);
4123
4124 /* We cannot ignore const expressions because they might be references
4125 to a const array whose index contains side effects. But we can
4126 ignore things that are actually constant or that have already been
4127 handled by this function. */
4128
4129 if (tree_invariant_p (e))
4130 return e;
4131
4132 switch (TREE_CODE_CLASS (code))
4133 {
4134 case tcc_exceptional:
4135 case tcc_type:
4136 case tcc_declaration:
4137 case tcc_comparison:
4138 case tcc_statement:
4139 case tcc_expression:
4140 case tcc_reference:
4141 case tcc_vl_exp:
4142 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4143 so that it will only be evaluated once. */
4144 /* The reference (r) and comparison (<) classes could be handled as
4145 below, but it is generally faster to only evaluate them once. */
4146 if (TREE_SIDE_EFFECTS (e))
4147 return save_expr (e);
4148 return e;
4149
4150 case tcc_constant:
4151 /* Constants need no processing. In fact, we should never reach
4152 here. */
4153 return e;
4154
4155 case tcc_binary:
4156 /* Division is slow and tends to be compiled with jumps,
4157 especially the division by powers of 2 that is often
4158 found inside of an array reference. So do it just once. */
4159 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4160 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4161 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4162 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4163 return save_expr (e);
4164 /* Recursively stabilize each operand. */
4165 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4167 break;
4168
4169 case tcc_unary:
4170 /* Recursively stabilize each operand. */
4171 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4172 break;
4173
4174 default:
4175 gcc_unreachable ();
4176 }
4177
4178 TREE_TYPE (result) = TREE_TYPE (e);
4179 TREE_READONLY (result) = TREE_READONLY (e);
4180 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4181 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4182
4183 return result;
4184 }
4185
4186 /* Stabilize a reference so that we can use it any number of times
4187 without causing its operands to be evaluated more than once.
4188 Returns the stabilized reference. This works by means of save_expr,
4189 so see the caveats in the comments about save_expr.
4190
4191 Also allows conversion expressions whose operands are references.
4192 Any other kind of expression is returned unchanged. */
4193
4194 tree
4195 stabilize_reference (tree ref)
4196 {
4197 tree result;
4198 enum tree_code code = TREE_CODE (ref);
4199
4200 switch (code)
4201 {
4202 case VAR_DECL:
4203 case PARM_DECL:
4204 case RESULT_DECL:
4205 /* No action is needed in this case. */
4206 return ref;
4207
4208 CASE_CONVERT:
4209 case FLOAT_EXPR:
4210 case FIX_TRUNC_EXPR:
4211 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4212 break;
4213
4214 case INDIRECT_REF:
4215 result = build_nt (INDIRECT_REF,
4216 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4217 break;
4218
4219 case COMPONENT_REF:
4220 result = build_nt (COMPONENT_REF,
4221 stabilize_reference (TREE_OPERAND (ref, 0)),
4222 TREE_OPERAND (ref, 1), NULL_TREE);
4223 break;
4224
4225 case BIT_FIELD_REF:
4226 result = build_nt (BIT_FIELD_REF,
4227 stabilize_reference (TREE_OPERAND (ref, 0)),
4228 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4229 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4230 break;
4231
4232 case ARRAY_REF:
4233 result = build_nt (ARRAY_REF,
4234 stabilize_reference (TREE_OPERAND (ref, 0)),
4235 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4236 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4237 break;
4238
4239 case ARRAY_RANGE_REF:
4240 result = build_nt (ARRAY_RANGE_REF,
4241 stabilize_reference (TREE_OPERAND (ref, 0)),
4242 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4243 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4244 break;
4245
4246 case COMPOUND_EXPR:
4247 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4248 it wouldn't be ignored. This matters when dealing with
4249 volatiles. */
4250 return stabilize_reference_1 (ref);
4251
4252 /* If arg isn't a kind of lvalue we recognize, make no change.
4253 Caller should recognize the error for an invalid lvalue. */
4254 default:
4255 return ref;
4256
4257 case ERROR_MARK:
4258 return error_mark_node;
4259 }
4260
4261 TREE_TYPE (result) = TREE_TYPE (ref);
4262 TREE_READONLY (result) = TREE_READONLY (ref);
4263 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4264 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4265
4266 return result;
4267 }
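/* Editorial illustration (not part of GCC): a typical client of
   stabilize_reference is a front end lowering "lhs op= rhs" into
   "lhs = lhs op rhs", where the lvalue is mentioned twice but its
   side effects must run only once.  The helper name is hypothetical.  */
#if 0
static tree
lower_compound_assignment (location_t loc, enum tree_code op,
			   tree lhs, tree rhs)
{
  /* Wrap side-effecting pieces of LHS in SAVE_EXPRs so that reusing it
     on both sides of the assignment is safe.  */
  tree stable = stabilize_reference (lhs);
  tree value = fold_build2_loc (loc, op, TREE_TYPE (stable), stable, rhs);
  return build2_loc (loc, MODIFY_EXPR, TREE_TYPE (stable), stable, value);
}
#endif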
4268 \f
4269 /* Low-level constructors for expressions. */
4270
4271 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4272 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4273
4274 void
4275 recompute_tree_invariant_for_addr_expr (tree t)
4276 {
4277 tree node;
4278 bool tc = true, se = false;
4279
4280 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4281
4282 /* We started out assuming this address is both invariant and constant, but
4283 does not have side effects. Now go down any handled components and see if
4284 any of them involve offsets that are either non-constant or non-invariant.
4285 Also check for side-effects.
4286
4287 ??? Note that this code makes no attempt to deal with the case where
4288 taking the address of something causes a copy due to misalignment. */
4289
4290 #define UPDATE_FLAGS(NODE) \
4291 do { tree _node = (NODE); \
4292 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4293 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4294
4295 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4296 node = TREE_OPERAND (node, 0))
4297 {
4298 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4299 array reference (probably made temporarily by the G++ front end),
4300 so ignore all the operands. */
4301 if ((TREE_CODE (node) == ARRAY_REF
4302 || TREE_CODE (node) == ARRAY_RANGE_REF)
4303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4304 {
4305 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4306 if (TREE_OPERAND (node, 2))
4307 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4308 if (TREE_OPERAND (node, 3))
4309 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4310 }
4311 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4312 FIELD_DECL, apparently. The G++ front end can put something else
4313 there, at least temporarily. */
4314 else if (TREE_CODE (node) == COMPONENT_REF
4315 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4316 {
4317 if (TREE_OPERAND (node, 2))
4318 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4319 }
4320 }
4321
4322 node = lang_hooks.expr_to_decl (node, &tc, &se);
4323
4324 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4325 the address, since &(*a)->b is a form of addition. If it's a constant, the
4326 address is constant too. If it's a decl, its address is constant if the
4327 decl is static. Everything else is not constant and, furthermore,
4328 taking the address of a volatile variable is not volatile. */
4329 if (TREE_CODE (node) == INDIRECT_REF
4330 || TREE_CODE (node) == MEM_REF)
4331 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4332 else if (CONSTANT_CLASS_P (node))
4333 ;
4334 else if (DECL_P (node))
4335 tc &= (staticp (node) != NULL_TREE);
4336 else
4337 {
4338 tc = false;
4339 se |= TREE_SIDE_EFFECTS (node);
4340 }
4341
4342
4343 TREE_CONSTANT (t) = tc;
4344 TREE_SIDE_EFFECTS (t) = se;
4345 #undef UPDATE_FLAGS
4346 }
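/* Editorial illustration (not part of GCC): build1 already calls
   recompute_tree_invariant_for_addr_expr when it creates an ADDR_EXPR,
   but code that later replaces TREE_OPERAND (addr, 0) by hand must call
   it again so TREE_CONSTANT and TREE_SIDE_EFFECTS stay accurate.  The
   helper name is hypothetical.  */
#if 0
static void
replace_addr_operand (tree addr, tree new_base)
{
  gcc_assert (TREE_CODE (addr) == ADDR_EXPR);
  TREE_OPERAND (addr, 0) = new_base;
  /* Re-derive the invariance and side-effect flags of the address.  */
  recompute_tree_invariant_for_addr_expr (addr);
}
#endif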
4347
4348 /* Build an expression of code CODE, data type TYPE, and operands as
4349 specified. Expressions and reference nodes can be created this way.
4350 Constants, decls, types and misc nodes cannot be.
4351
4352 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4353 enough for all extant tree codes. */
4354
4355 tree
4356 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4357 {
4358 tree t;
4359
4360 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4361
4362 t = make_node_stat (code PASS_MEM_STAT);
4363 TREE_TYPE (t) = tt;
4364
4365 return t;
4366 }
4367
4368 tree
4369 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4370 {
4371 int length = sizeof (struct tree_exp);
4372 tree t;
4373
4374 record_node_allocation_statistics (code, length);
4375
4376 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4377
4378 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4379
4380 memset (t, 0, sizeof (struct tree_common));
4381
4382 TREE_SET_CODE (t, code);
4383
4384 TREE_TYPE (t) = type;
4385 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4386 TREE_OPERAND (t, 0) = node;
4387 if (node && !TYPE_P (node))
4388 {
4389 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4390 TREE_READONLY (t) = TREE_READONLY (node);
4391 }
4392
4393 if (TREE_CODE_CLASS (code) == tcc_statement)
4394 TREE_SIDE_EFFECTS (t) = 1;
4395 else switch (code)
4396 {
4397 case VA_ARG_EXPR:
4398 /* All of these have side-effects, no matter what their
4399 operands are. */
4400 TREE_SIDE_EFFECTS (t) = 1;
4401 TREE_READONLY (t) = 0;
4402 break;
4403
4404 case INDIRECT_REF:
4405 /* Whether a dereference is readonly has nothing to do with whether
4406 its operand is readonly. */
4407 TREE_READONLY (t) = 0;
4408 break;
4409
4410 case ADDR_EXPR:
4411 if (node)
4412 recompute_tree_invariant_for_addr_expr (t);
4413 break;
4414
4415 default:
4416 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4417 && node && !TYPE_P (node)
4418 && TREE_CONSTANT (node))
4419 TREE_CONSTANT (t) = 1;
4420 if (TREE_CODE_CLASS (code) == tcc_reference
4421 && node && TREE_THIS_VOLATILE (node))
4422 TREE_THIS_VOLATILE (t) = 1;
4423 break;
4424 }
4425
4426 return t;
4427 }
4428
4429 #define PROCESS_ARG(N) \
4430 do { \
4431 TREE_OPERAND (t, N) = arg##N; \
4432 if (arg##N &&!TYPE_P (arg##N)) \
4433 { \
4434 if (TREE_SIDE_EFFECTS (arg##N)) \
4435 side_effects = 1; \
4436 if (!TREE_READONLY (arg##N) \
4437 && !CONSTANT_CLASS_P (arg##N)) \
4438 (void) (read_only = 0); \
4439 if (!TREE_CONSTANT (arg##N)) \
4440 (void) (constant = 0); \
4441 } \
4442 } while (0)
4443
4444 tree
4445 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4446 {
4447 bool constant, read_only, side_effects;
4448 tree t;
4449
4450 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4451
4452 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4453 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4454 /* When sizetype precision doesn't match that of pointers
4455 we need to be able to build explicit extensions or truncations
4456 of the offset argument. */
4457 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4458 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4459 && TREE_CODE (arg1) == INTEGER_CST);
4460
4461 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4462 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4463 && ptrofftype_p (TREE_TYPE (arg1)));
4464
4465 t = make_node_stat (code PASS_MEM_STAT);
4466 TREE_TYPE (t) = tt;
4467
4468 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4469 result based on those same flags for the arguments. But if the
4470 arguments aren't really even `tree' expressions, we shouldn't be trying
4471 to do this. */
4472
4473 /* Expressions without side effects may be constant if their
4474 arguments are as well. */
4475 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4476 || TREE_CODE_CLASS (code) == tcc_binary);
4477 read_only = 1;
4478 side_effects = TREE_SIDE_EFFECTS (t);
4479
4480 PROCESS_ARG (0);
4481 PROCESS_ARG (1);
4482
4483 TREE_SIDE_EFFECTS (t) = side_effects;
4484 if (code == MEM_REF)
4485 {
4486 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4487 {
4488 tree o = TREE_OPERAND (arg0, 0);
4489 TREE_READONLY (t) = TREE_READONLY (o);
4490 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4491 }
4492 }
4493 else
4494 {
4495 TREE_READONLY (t) = read_only;
4496 TREE_CONSTANT (t) = constant;
4497 TREE_THIS_VOLATILE (t)
4498 = (TREE_CODE_CLASS (code) == tcc_reference
4499 && arg0 && TREE_THIS_VOLATILE (arg0));
4500 }
4501
4502 return t;
4503 }
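/* Editorial illustration (not part of GCC): build2 (and the other buildN
   entry points) construct the node verbatim, with no simplification; the
   fold_buildN family folds the result.  The helper name is hypothetical.  */
#if 0
static tree
build_index_plus_one (tree index)
{
  tree one = build_int_cst (TREE_TYPE (index), 1);
  /* TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT of the result are
     derived from the operands by PROCESS_ARG above.  */
  return build2 (PLUS_EXPR, TREE_TYPE (index), index, one);
}
#endif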
4504
4505
4506 tree
4507 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4508 tree arg2 MEM_STAT_DECL)
4509 {
4510 bool constant, read_only, side_effects;
4511 tree t;
4512
4513 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4514 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4515
4516 t = make_node_stat (code PASS_MEM_STAT);
4517 TREE_TYPE (t) = tt;
4518
4519 read_only = 1;
4520
4521 /* As a special exception, if COND_EXPR has NULL branches, we
4522 assume that it is a gimple statement and always consider
4523 it to have side effects. */
4524 if (code == COND_EXPR
4525 && tt == void_type_node
4526 && arg1 == NULL_TREE
4527 && arg2 == NULL_TREE)
4528 side_effects = true;
4529 else
4530 side_effects = TREE_SIDE_EFFECTS (t);
4531
4532 PROCESS_ARG (0);
4533 PROCESS_ARG (1);
4534 PROCESS_ARG (2);
4535
4536 if (code == COND_EXPR)
4537 TREE_READONLY (t) = read_only;
4538
4539 TREE_SIDE_EFFECTS (t) = side_effects;
4540 TREE_THIS_VOLATILE (t)
4541 = (TREE_CODE_CLASS (code) == tcc_reference
4542 && arg0 && TREE_THIS_VOLATILE (arg0));
4543
4544 return t;
4545 }
4546
4547 tree
4548 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4549 tree arg2, tree arg3 MEM_STAT_DECL)
4550 {
4551 bool constant, read_only, side_effects;
4552 tree t;
4553
4554 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4555
4556 t = make_node_stat (code PASS_MEM_STAT);
4557 TREE_TYPE (t) = tt;
4558
4559 side_effects = TREE_SIDE_EFFECTS (t);
4560
4561 PROCESS_ARG (0);
4562 PROCESS_ARG (1);
4563 PROCESS_ARG (2);
4564 PROCESS_ARG (3);
4565
4566 TREE_SIDE_EFFECTS (t) = side_effects;
4567 TREE_THIS_VOLATILE (t)
4568 = (TREE_CODE_CLASS (code) == tcc_reference
4569 && arg0 && TREE_THIS_VOLATILE (arg0));
4570
4571 return t;
4572 }
4573
4574 tree
4575 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4576 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4577 {
4578 bool constant, read_only, side_effects;
4579 tree t;
4580
4581 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4582
4583 t = make_node_stat (code PASS_MEM_STAT);
4584 TREE_TYPE (t) = tt;
4585
4586 side_effects = TREE_SIDE_EFFECTS (t);
4587
4588 PROCESS_ARG (0);
4589 PROCESS_ARG (1);
4590 PROCESS_ARG (2);
4591 PROCESS_ARG (3);
4592 PROCESS_ARG (4);
4593
4594 TREE_SIDE_EFFECTS (t) = side_effects;
4595 if (code == TARGET_MEM_REF)
4596 {
4597 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4598 {
4599 tree o = TREE_OPERAND (arg0, 0);
4600 TREE_READONLY (t) = TREE_READONLY (o);
4601 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4602 }
4603 }
4604 else
4605 TREE_THIS_VOLATILE (t)
4606 = (TREE_CODE_CLASS (code) == tcc_reference
4607 && arg0 && TREE_THIS_VOLATILE (arg0));
4608
4609 return t;
4610 }
4611
4612 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4613 on the pointer PTR. */
4614
4615 tree
4616 build_simple_mem_ref_loc (location_t loc, tree ptr)
4617 {
4618 HOST_WIDE_INT offset = 0;
4619 tree ptype = TREE_TYPE (ptr);
4620 tree tem;
4621 /* For convenience allow addresses that collapse to a simple base
4622 and offset. */
4623 if (TREE_CODE (ptr) == ADDR_EXPR
4624 && (handled_component_p (TREE_OPERAND (ptr, 0))
4625 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4626 {
4627 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4628 gcc_assert (ptr);
4629 ptr = build_fold_addr_expr (ptr);
4630 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4631 }
4632 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4633 ptr, build_int_cst (ptype, offset));
4634 SET_EXPR_LOCATION (tem, loc);
4635 return tem;
4636 }
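/* Editorial illustration (not part of GCC): dereferencing a pointer with
   the semantics of a plain INDIRECT_REF; the helper name is hypothetical.  */
#if 0
static tree
deref_pointer (tree ptr)
{
  /* Equivalent to *PTR; addresses of the form &a.b are first collapsed
     to a base pointer plus a constant offset.  */
  return build_simple_mem_ref_loc (UNKNOWN_LOCATION, ptr);
}
#endif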
4637
4638 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4639
4640 offset_int
4641 mem_ref_offset (const_tree t)
4642 {
4643 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4644 }
4645
4646 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4647 offsetted by OFFSET units. */
4648
4649 tree
4650 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4651 {
4652 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4653 build_fold_addr_expr (base),
4654 build_int_cst (ptr_type_node, offset));
4655 tree addr = build1 (ADDR_EXPR, type, ref);
4656 recompute_tree_invariant_for_addr_expr (addr);
4657 return addr;
4658 }
4659
4660 /* Similar except don't specify the TREE_TYPE
4661 and leave the TREE_SIDE_EFFECTS as 0.
4662 It is permissible for arguments to be null,
4663 or even garbage if their values do not matter. */
4664
4665 tree
4666 build_nt (enum tree_code code, ...)
4667 {
4668 tree t;
4669 int length;
4670 int i;
4671 va_list p;
4672
4673 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4674
4675 va_start (p, code);
4676
4677 t = make_node (code);
4678 length = TREE_CODE_LENGTH (code);
4679
4680 for (i = 0; i < length; i++)
4681 TREE_OPERAND (t, i) = va_arg (p, tree);
4682
4683 va_end (p);
4684 return t;
4685 }
4686
4687 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4688 tree vec. */
4689
4690 tree
4691 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4692 {
4693 tree ret, t;
4694 unsigned int ix;
4695
4696 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4697 CALL_EXPR_FN (ret) = fn;
4698 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4699 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4700 CALL_EXPR_ARG (ret, ix) = t;
4701 return ret;
4702 }
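/* Editorial illustration (not part of GCC): building an unchecked
   CALL_EXPR from a GC-allocated argument vector.  FN and the arguments
   are whatever the caller has at hand; the helper name is hypothetical.  */
#if 0
static tree
make_call_node (tree fn, tree arg0, tree arg1)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg0);
  vec_safe_push (args, arg1);
  return build_nt_call_vec (fn, args);
}
#endif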
4703 \f
4704 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4705 We do NOT enter this node in any sort of symbol table.
4706
4707 LOC is the location of the decl.
4708
4709 layout_decl is used to set up the decl's storage layout.
4710 Other slots are initialized to 0 or null pointers. */
4711
4712 tree
4713 build_decl_stat (location_t loc, enum tree_code code, tree name,
4714 tree type MEM_STAT_DECL)
4715 {
4716 tree t;
4717
4718 t = make_node_stat (code PASS_MEM_STAT);
4719 DECL_SOURCE_LOCATION (t) = loc;
4720
4721 /* if (type == error_mark_node)
4722 type = integer_type_node; */
4723 /* That is not done, deliberately, so that having error_mark_node
4724 as the type can suppress useless errors in the use of this variable. */
4725
4726 DECL_NAME (t) = name;
4727 TREE_TYPE (t) = type;
4728
4729 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4730 layout_decl (t, 0);
4731
4732 return t;
4733 }
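/* Editorial illustration (not part of GCC): creating an artificial
   variable with the build_decl wrapper macro.  The variable name and
   flags chosen here are hypothetical.  */
#if 0
static tree
make_counter_var (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("counter"), integer_type_node);
  /* build_decl has already run layout_decl for VAR_DECLs; the remaining
     flags are up to the caller.  */
  DECL_ARTIFICIAL (var) = 1;
  TREE_STATIC (var) = 1;
  return var;
}
#endif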
4734
4735 /* Builds and returns function declaration with NAME and TYPE. */
4736
4737 tree
4738 build_fn_decl (const char *name, tree type)
4739 {
4740 tree id = get_identifier (name);
4741 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4742
4743 DECL_EXTERNAL (decl) = 1;
4744 TREE_PUBLIC (decl) = 1;
4745 DECL_ARTIFICIAL (decl) = 1;
4746 TREE_NOTHROW (decl) = 1;
4747
4748 return decl;
4749 }
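/* Editorial illustration (not part of GCC): build_fn_decl is handy for
   declaring an external runtime routine the middle end wants to call.
   The function name and signature below are hypothetical.  */
#if 0
static tree
declare_example_runtime_hook (void)
{
  tree fntype = build_function_type_list (void_type_node,
					  ptr_type_node, NULL_TREE);
  /* The result is external, public, artificial and nothrow, as set up
     by build_fn_decl above.  */
  return build_fn_decl ("__example_runtime_hook", fntype);
}
#endif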
4750
4751 vec<tree, va_gc> *all_translation_units;
4752
4753 /* Builds a new translation-unit decl with name NAME, queues it in the
4754 global list of translation-unit decls and returns it. */
4755
4756 tree
4757 build_translation_unit_decl (tree name)
4758 {
4759 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4760 name, NULL_TREE);
4761 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4762 vec_safe_push (all_translation_units, tu);
4763 return tu;
4764 }
4765
4766 \f
4767 /* BLOCK nodes are used to represent the structure of binding contours
4768 and declarations, once those contours have been exited and their contents
4769 compiled. This information is used for outputting debugging info. */
4770
4771 tree
4772 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4773 {
4774 tree block = make_node (BLOCK);
4775
4776 BLOCK_VARS (block) = vars;
4777 BLOCK_SUBBLOCKS (block) = subblocks;
4778 BLOCK_SUPERCONTEXT (block) = supercontext;
4779 BLOCK_CHAIN (block) = chain;
4780 return block;
4781 }
4782
4783 \f
4784 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4785
4786 LOC is the location to use in tree T. */
4787
4788 void
4789 protected_set_expr_location (tree t, location_t loc)
4790 {
4791 if (CAN_HAVE_LOCATION_P (t))
4792 SET_EXPR_LOCATION (t, loc);
4793 }
4794 \f
4795 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4796 is ATTRIBUTE. */
4797
4798 tree
4799 build_decl_attribute_variant (tree ddecl, tree attribute)
4800 {
4801 DECL_ATTRIBUTES (ddecl) = attribute;
4802 return ddecl;
4803 }
4804
4805 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4806 is ATTRIBUTE and its qualifiers are QUALS.
4807
4808 Record such modified types already made so we don't make duplicates. */
4809
4810 tree
4811 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4812 {
4813 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4814 {
4815 inchash::hash hstate;
4816 tree ntype;
4817 int i;
4818 tree t;
4819 enum tree_code code = TREE_CODE (ttype);
4820
4821 /* Building a distinct copy of a tagged type is inappropriate; it
4822 causes breakage in code that expects there to be a one-to-one
4823 relationship between a struct and its fields.
4824 build_duplicate_type is another solution (as used in
4825 handle_transparent_union_attribute), but that doesn't play well
4826 with the stronger C++ type identity model. */
4827 if (TREE_CODE (ttype) == RECORD_TYPE
4828 || TREE_CODE (ttype) == UNION_TYPE
4829 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4830 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4831 {
4832 warning (OPT_Wattributes,
4833 "ignoring attributes applied to %qT after definition",
4834 TYPE_MAIN_VARIANT (ttype));
4835 return build_qualified_type (ttype, quals);
4836 }
4837
4838 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4839 ntype = build_distinct_type_copy (ttype);
4840
4841 TYPE_ATTRIBUTES (ntype) = attribute;
4842
4843 hstate.add_int (code);
4844 if (TREE_TYPE (ntype))
4845 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4846 attribute_hash_list (attribute, hstate);
4847
4848 switch (TREE_CODE (ntype))
4849 {
4850 case FUNCTION_TYPE:
4851 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4852 break;
4853 case ARRAY_TYPE:
4854 if (TYPE_DOMAIN (ntype))
4855 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4856 break;
4857 case INTEGER_TYPE:
4858 t = TYPE_MAX_VALUE (ntype);
4859 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4860 hstate.add_object (TREE_INT_CST_ELT (t, i));
4861 break;
4862 case REAL_TYPE:
4863 case FIXED_POINT_TYPE:
4864 {
4865 unsigned int precision = TYPE_PRECISION (ntype);
4866 hstate.add_object (precision);
4867 }
4868 break;
4869 default:
4870 break;
4871 }
4872
4873 ntype = type_hash_canon (hstate.end(), ntype);
4874
4875 /* If the target-dependent attributes make NTYPE different from
4876 its canonical type, we will need to use structural equality
4877 checks for this type. */
4878 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4879 || !comp_type_attributes (ntype, ttype))
4880 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4881 else if (TYPE_CANONICAL (ntype) == ntype)
4882 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4883
4884 ttype = build_qualified_type (ntype, quals);
4885 }
4886 else if (TYPE_QUALS (ttype) != quals)
4887 ttype = build_qualified_type (ttype, quals);
4888
4889 return ttype;
4890 }
4891
4892 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4893 the same. */
4894
4895 static bool
4896 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4897 {
4898 tree cl1, cl2;
4899 for (cl1 = clauses1, cl2 = clauses2;
4900 cl1 && cl2;
4901 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4902 {
4903 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4904 return false;
4905 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4906 {
4907 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4908 OMP_CLAUSE_DECL (cl2)) != 1)
4909 return false;
4910 }
4911 switch (OMP_CLAUSE_CODE (cl1))
4912 {
4913 case OMP_CLAUSE_ALIGNED:
4914 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4915 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4916 return false;
4917 break;
4918 case OMP_CLAUSE_LINEAR:
4919 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4920 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4921 return false;
4922 break;
4923 case OMP_CLAUSE_SIMDLEN:
4924 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4925 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4926 return false;
4927 default:
4928 break;
4929 }
4930 }
4931 return true;
4932 }
4933
4934 /* Compare two constructor-element-type constant lists. Return true if the
4935 lists are known to be equal; otherwise return false. */
4936
4937 static bool
4938 simple_cst_list_equal (const_tree l1, const_tree l2)
4939 {
4940 while (l1 != NULL_TREE && l2 != NULL_TREE)
4941 {
4942 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4943 return false;
4944
4945 l1 = TREE_CHAIN (l1);
4946 l2 = TREE_CHAIN (l2);
4947 }
4948
4949 return l1 == l2;
4950 }
4951
4952 /* Compare two identifier nodes representing attributes. Either one may
4953 be in wrapped __ATTR__ form. Return true if they are the same, false
4954 otherwise. */
4955
4956 static bool
4957 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4958 {
4959 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4960 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4961 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4962
4963 /* Identifiers can be compared directly for equality. */
4964 if (attr1 == attr2)
4965 return true;
4966
4967 /* If they are not equal, they may still be one in the form
4968 'text' while the other one is in the form '__text__'. TODO:
4969 If we were storing attributes in normalized 'text' form, then
4970 this could all go away and we could take full advantage of
4971 the fact that we're comparing identifiers. :-) */
4972 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4973 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4974
4975 if (attr2_len == attr1_len + 4)
4976 {
4977 const char *p = IDENTIFIER_POINTER (attr2);
4978 const char *q = IDENTIFIER_POINTER (attr1);
4979 if (p[0] == '_' && p[1] == '_'
4980 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4981 && strncmp (q, p + 2, attr1_len) == 0)
4982 return true;
4983 }
4984 else if (attr2_len + 4 == attr1_len)
4985 {
4986 const char *p = IDENTIFIER_POINTER (attr2);
4987 const char *q = IDENTIFIER_POINTER (attr1);
4988 if (q[0] == '_' && q[1] == '_'
4989 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4990 && strncmp (q + 2, p, attr2_len) == 0)
4991 return true;
4992 }
4993
4994 return false;
4995 }
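/* Editorial illustration (not part of GCC): cmp_attrib_identifiers treats
   the identifiers "format" and "__format__" as naming the same attribute.
   The helper name is hypothetical; NAME must be an IDENTIFIER_NODE.  */
#if 0
static bool
is_format_attr_name (tree name)
{
  return cmp_attrib_identifiers (name, get_identifier ("format"));
}
#endif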
4996
4997 /* Compare two attributes for their value identity. Return true if the
4998 attribute values are known to be equal; otherwise return false. */
4999
5000 bool
5001 attribute_value_equal (const_tree attr1, const_tree attr2)
5002 {
5003 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5004 return true;
5005
5006 if (TREE_VALUE (attr1) != NULL_TREE
5007 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5008 && TREE_VALUE (attr2) != NULL_TREE
5009 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5010 {
5011 /* Handle attribute format. */
5012 if (is_attribute_p ("format", get_attribute_name (attr1)))
5013 {
5014 attr1 = TREE_VALUE (attr1);
5015 attr2 = TREE_VALUE (attr2);
5016 /* Compare the archetypes (printf/scanf/strftime/...). */
5017 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5018 TREE_VALUE (attr2)))
5019 return false;
5020 /* Archetypes are the same. Compare the rest. */
5021 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5022 TREE_CHAIN (attr2)) == 1);
5023 }
5024 return (simple_cst_list_equal (TREE_VALUE (attr1),
5025 TREE_VALUE (attr2)) == 1);
5026 }
5027
5028 if ((flag_openmp || flag_openmp_simd)
5029 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5030 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5031 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5032 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5033 TREE_VALUE (attr2));
5034
5035 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5036 }
5037
5038 /* Return 0 if the attributes for two types are incompatible, 1 if they
5039 are compatible, and 2 if they are nearly compatible (which causes a
5040 warning to be generated). */
5041 int
5042 comp_type_attributes (const_tree type1, const_tree type2)
5043 {
5044 const_tree a1 = TYPE_ATTRIBUTES (type1);
5045 const_tree a2 = TYPE_ATTRIBUTES (type2);
5046 const_tree a;
5047
5048 if (a1 == a2)
5049 return 1;
5050 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5051 {
5052 const struct attribute_spec *as;
5053 const_tree attr;
5054
5055 as = lookup_attribute_spec (get_attribute_name (a));
5056 if (!as || as->affects_type_identity == false)
5057 continue;
5058
5059 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5060 if (!attr || !attribute_value_equal (a, attr))
5061 break;
5062 }
5063 if (!a)
5064 {
5065 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5066 {
5067 const struct attribute_spec *as;
5068
5069 as = lookup_attribute_spec (get_attribute_name (a));
5070 if (!as || as->affects_type_identity == false)
5071 continue;
5072
5073 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5074 break;
5075 /* We don't need to compare trees again, as we already did
5076 this in the first loop. */
5077 }
5078 /* All types - affecting identity - are equal, so
5079 there is no need to call target hook for comparison. */
5080 if (!a)
5081 return 1;
5082 }
5083 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5084 return 0;
5085 /* As some type combinations - like default calling-convention - might
5086 be compatible, we have to call the target hook to get the final result. */
5087 return targetm.comp_type_attributes (type1, type2);
5088 }
5089
5090 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5091 is ATTRIBUTE.
5092
5093 Record such modified types already made so we don't make duplicates. */
5094
5095 tree
5096 build_type_attribute_variant (tree ttype, tree attribute)
5097 {
5098 return build_type_attribute_qual_variant (ttype, attribute,
5099 TYPE_QUALS (ttype));
5100 }
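/* Editorial illustration (not part of GCC): adding an attribute to a type
   goes through build_type_attribute_variant so that identical attributed
   variants are shared.  The attribute chosen here is only an example and
   the helper name is hypothetical.  */
#if 0
static tree
add_unused_attribute (tree type)
{
  tree attrs = tree_cons (get_identifier ("unused"), NULL_TREE,
			  TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attrs);
}
#endif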
5101
5102
5103 /* Reset the expression *EXPR_P, a size or position.
5104
5105 ??? We could reset all non-constant sizes or positions. But it's cheap
5106 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5107
5108 We need to reset self-referential sizes or positions because they cannot
5109 be gimplified and thus can contain a CALL_EXPR after the gimplification
5110 is finished, which will run afoul of LTO streaming. And they need to be
5111 reset to something essentially dummy but not constant, so as to preserve
5112 the properties of the object they are attached to. */
5113
5114 static inline void
5115 free_lang_data_in_one_sizepos (tree *expr_p)
5116 {
5117 tree expr = *expr_p;
5118 if (CONTAINS_PLACEHOLDER_P (expr))
5119 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5120 }
5121
5122
5123 /* Reset all the fields in a binfo node BINFO. We only keep
5124 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5125
5126 static void
5127 free_lang_data_in_binfo (tree binfo)
5128 {
5129 unsigned i;
5130 tree t;
5131
5132 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5133
5134 BINFO_VIRTUALS (binfo) = NULL_TREE;
5135 BINFO_BASE_ACCESSES (binfo) = NULL;
5136 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5137 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5138
5139 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5140 free_lang_data_in_binfo (t);
5141 }
5142
5143
5144 /* Reset all language specific information still present in TYPE. */
5145
5146 static void
5147 free_lang_data_in_type (tree type)
5148 {
5149 gcc_assert (TYPE_P (type));
5150
5151 /* Give the FE a chance to remove its own data first. */
5152 lang_hooks.free_lang_data (type);
5153
5154 TREE_LANG_FLAG_0 (type) = 0;
5155 TREE_LANG_FLAG_1 (type) = 0;
5156 TREE_LANG_FLAG_2 (type) = 0;
5157 TREE_LANG_FLAG_3 (type) = 0;
5158 TREE_LANG_FLAG_4 (type) = 0;
5159 TREE_LANG_FLAG_5 (type) = 0;
5160 TREE_LANG_FLAG_6 (type) = 0;
5161
5162 if (TREE_CODE (type) == FUNCTION_TYPE)
5163 {
5164 /* Remove the const and volatile qualifiers from arguments. The
5165 C++ front end removes them, but the C front end does not,
5166 leading to false ODR violation errors when merging two
5167 instances of the same function signature compiled by
5168 different front ends. */
5169 tree p;
5170
5171 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5172 {
5173 tree arg_type = TREE_VALUE (p);
5174
5175 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5176 {
5177 int quals = TYPE_QUALS (arg_type)
5178 & ~TYPE_QUAL_CONST
5179 & ~TYPE_QUAL_VOLATILE;
5180 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5181 free_lang_data_in_type (TREE_VALUE (p));
5182 }
5183 /* C++ FE uses TREE_PURPOSE to store initial values. */
5184 TREE_PURPOSE (p) = NULL;
5185 }
5186 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5187 TYPE_MINVAL (type) = NULL;
5188 }
5189 if (TREE_CODE (type) == METHOD_TYPE)
5190 {
5191 tree p;
5192
5193 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5194 {
5195 /* C++ FE uses TREE_PURPOSE to store initial values. */
5196 TREE_PURPOSE (p) = NULL;
5197 }
5198 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5199 TYPE_MINVAL (type) = NULL;
5200 }
5201
5202 /* Remove members that are not actually FIELD_DECLs from the field
5203 list of an aggregate. These occur in C++. */
5204 if (RECORD_OR_UNION_TYPE_P (type))
5205 {
5206 tree prev, member;
5207
5208 /* Note that TYPE_FIELDS can be shared across distinct
5209 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5210 to be removed, we cannot set its TREE_CHAIN to NULL.
5211 Otherwise, we would not be able to find all the other fields
5212 in the other instances of this TREE_TYPE.
5213
5214 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5215 prev = NULL_TREE;
5216 member = TYPE_FIELDS (type);
5217 while (member)
5218 {
5219 if (TREE_CODE (member) == FIELD_DECL
5220 || (TREE_CODE (member) == TYPE_DECL
5221 && !DECL_IGNORED_P (member)
5222 && debug_info_level > DINFO_LEVEL_TERSE
5223 && !is_redundant_typedef (member)))
5224 {
5225 if (prev)
5226 TREE_CHAIN (prev) = member;
5227 else
5228 TYPE_FIELDS (type) = member;
5229 prev = member;
5230 }
5231
5232 member = TREE_CHAIN (member);
5233 }
5234
5235 if (prev)
5236 TREE_CHAIN (prev) = NULL_TREE;
5237 else
5238 TYPE_FIELDS (type) = NULL_TREE;
5239
5240 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5241 and dangle the pointer from time to time. */
5242 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5243 TYPE_VFIELD (type) = NULL_TREE;
5244
5245 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5246 to enable ODR warnings about differing method lists, doing so
5247 would impractically increase the size of the streamed LTO data.
5248 Record whether TYPE_METHODS was non-NULL, though; this is used
5249 by function.c and the pretty printers. */
5250 if (TYPE_METHODS (type))
5251 TYPE_METHODS (type) = error_mark_node;
5252 if (TYPE_BINFO (type))
5253 {
5254 free_lang_data_in_binfo (TYPE_BINFO (type));
5255 /* We need to preserve link to bases and virtual table for all
5256 polymorphic types to make devirtualization machinery working.
5257 Debug output cares only about bases, but output also
5258 virtual table pointers so merging of -fdevirtualize and
5259 -fno-devirtualize units is easier. */
5260 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5261 || !flag_devirtualize)
5262 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5263 && !BINFO_VTABLE (TYPE_BINFO (type)))
5264 || debug_info_level != DINFO_LEVEL_NONE))
5265 TYPE_BINFO (type) = NULL;
5266 }
5267 }
5268 else
5269 {
5270 /* For non-aggregate types, clear out the language slot (which
5271 overloads TYPE_BINFO). */
5272 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5273
5274 if (INTEGRAL_TYPE_P (type)
5275 || SCALAR_FLOAT_TYPE_P (type)
5276 || FIXED_POINT_TYPE_P (type))
5277 {
5278 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5279 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5280 }
5281 }
5282
5283 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5284 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5285
5286 if (TYPE_CONTEXT (type)
5287 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5288 {
5289 tree ctx = TYPE_CONTEXT (type);
5290 do
5291 {
5292 ctx = BLOCK_SUPERCONTEXT (ctx);
5293 }
5294 while (ctx && TREE_CODE (ctx) == BLOCK);
5295 TYPE_CONTEXT (type) = ctx;
5296 }
5297 }
5298
5299
5300 /* Return true if DECL may need an assembler name to be set. */
5301
5302 static inline bool
5303 need_assembler_name_p (tree decl)
5304 {
5305 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5306 Rule merging. This makes type_odr_p return true on those types during
5307 LTO, and by comparing the mangled names we can tell which types are
5308 intended to be equivalent across compilation units.
5309
5310 We do not store names of types for which type_in_anonymous_namespace_p holds.
5311
5312 Record, union and enumeration types have linkage that allows us
5313 to check type_in_anonymous_namespace_p. We do not mangle compound types
5314 that can always be compared structurally.
5315
5316 Similarly for builtin types, we compare properties of their main variant.
5317 A special case is integer types, where mangling does distinguish
5318 between char/signed char/unsigned char etc. Storing names for these allows
5319 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5320 See cp/mangle.c:write_builtin_type for details. */
5321
5322 if (flag_lto_odr_type_mering
5323 && TREE_CODE (decl) == TYPE_DECL
5324 && DECL_NAME (decl)
5325 && decl == TYPE_NAME (TREE_TYPE (decl))
5326 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5327 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5328 && (type_with_linkage_p (TREE_TYPE (decl))
5329 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5330 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5331 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5332 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5333 if (TREE_CODE (decl) != FUNCTION_DECL
5334 && TREE_CODE (decl) != VAR_DECL)
5335 return false;
5336
5337 /* If DECL already has its assembler name set, it does not need a
5338 new one. */
5339 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5340 || DECL_ASSEMBLER_NAME_SET_P (decl))
5341 return false;
5342
5343 /* Abstract decls do not need an assembler name. */
5344 if (DECL_ABSTRACT_P (decl))
5345 return false;
5346
5347 /* For VAR_DECLs, only static, public and external symbols need an
5348 assembler name. */
5349 if (TREE_CODE (decl) == VAR_DECL
5350 && !TREE_STATIC (decl)
5351 && !TREE_PUBLIC (decl)
5352 && !DECL_EXTERNAL (decl))
5353 return false;
5354
5355 if (TREE_CODE (decl) == FUNCTION_DECL)
5356 {
5357 /* Do not set assembler name on builtins. Allow RTL expansion to
5358 decide whether to expand inline or via a regular call. */
5359 if (DECL_BUILT_IN (decl)
5360 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5361 return false;
5362
5363 /* Functions represented in the callgraph need an assembler name. */
5364 if (cgraph_node::get (decl) != NULL)
5365 return true;
5366
5367 /* Unused and not public functions don't need an assembler name. */
5368 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5369 return false;
5370 }
5371
5372 return true;
5373 }
5374
5375
5376 /* Reset all language specific information still present in symbol
5377 DECL. */
5378
5379 static void
5380 free_lang_data_in_decl (tree decl)
5381 {
5382 gcc_assert (DECL_P (decl));
5383
5384 /* Give the FE a chance to remove its own data first. */
5385 lang_hooks.free_lang_data (decl);
5386
5387 TREE_LANG_FLAG_0 (decl) = 0;
5388 TREE_LANG_FLAG_1 (decl) = 0;
5389 TREE_LANG_FLAG_2 (decl) = 0;
5390 TREE_LANG_FLAG_3 (decl) = 0;
5391 TREE_LANG_FLAG_4 (decl) = 0;
5392 TREE_LANG_FLAG_5 (decl) = 0;
5393 TREE_LANG_FLAG_6 (decl) = 0;
5394
5395 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5396 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5397 if (TREE_CODE (decl) == FIELD_DECL)
5398 {
5399 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5400 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5401 DECL_QUALIFIER (decl) = NULL_TREE;
5402 }
5403
5404 if (TREE_CODE (decl) == FUNCTION_DECL)
5405 {
5406 struct cgraph_node *node;
5407 if (!(node = cgraph_node::get (decl))
5408 || (!node->definition && !node->clones))
5409 {
5410 if (node)
5411 node->release_body ();
5412 else
5413 {
5414 release_function_body (decl);
5415 DECL_ARGUMENTS (decl) = NULL;
5416 DECL_RESULT (decl) = NULL;
5417 DECL_INITIAL (decl) = error_mark_node;
5418 }
5419 }
5420 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5421 {
5422 tree t;
5423
5424 /* If DECL has a gimple body, then the context for its
5425 arguments must be DECL. Otherwise, it doesn't really
5426 matter, as we will not be emitting any code for DECL. In
5427 general, there may be other instances of DECL created by
5428 the front end and since PARM_DECLs are generally shared,
5429 their DECL_CONTEXT changes as the replicas of DECL are
5430 created. The only time where DECL_CONTEXT is important
5431 is for the FUNCTION_DECLs that have a gimple body (since
5432 the PARM_DECL will be used in the function's body). */
5433 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5434 DECL_CONTEXT (t) = decl;
5435 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5436 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5437 = target_option_default_node;
5438 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5439 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5440 = optimization_default_node;
5441 }
5442
5443 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5444 At this point, it is not needed anymore. */
5445 DECL_SAVED_TREE (decl) = NULL_TREE;
5446
5447 /* Clear the abstract origin if it refers to a method. Otherwise
5448 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5449 origin will not be output correctly. */
5450 if (DECL_ABSTRACT_ORIGIN (decl)
5451 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5452 && RECORD_OR_UNION_TYPE_P
5453 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5454 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5455
5456 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5457 DECL_VINDEX referring to itself into a vtable slot number as it
5458 should. Happens with functions that are copied and then forgotten
5459 about. Just clear it, it won't matter anymore. */
5460 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5461 DECL_VINDEX (decl) = NULL_TREE;
5462 }
5463 else if (TREE_CODE (decl) == VAR_DECL)
5464 {
5465 if ((DECL_EXTERNAL (decl)
5466 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5467 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5468 DECL_INITIAL (decl) = NULL_TREE;
5469 }
5470 else if (TREE_CODE (decl) == TYPE_DECL)
5471 {
5472 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5473 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5474 DECL_INITIAL (decl) = NULL_TREE;
5475 }
5476 else if (TREE_CODE (decl) == FIELD_DECL)
5477 DECL_INITIAL (decl) = NULL_TREE;
5478 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5479 && DECL_INITIAL (decl)
5480 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5481 {
5482 /* Strip builtins from the translation-unit BLOCK. We still have targets
5483 without builtin_decl_explicit support and also builtins are shared
5484 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5485 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5486 while (*nextp)
5487 {
5488 tree var = *nextp;
5489 if (TREE_CODE (var) == FUNCTION_DECL
5490 && DECL_BUILT_IN (var))
5491 *nextp = TREE_CHAIN (var);
5492 else
5493 nextp = &TREE_CHAIN (var);
5494 }
5495 }
5496 }
5497
5498
5499 /* Data used when collecting DECLs and TYPEs for language data removal. */
5500
5501 struct free_lang_data_d
5502 {
5503 free_lang_data_d () : decls (100), types (100) {}
5504
5505 /* Worklist to avoid excessive recursion. */
5506 auto_vec<tree> worklist;
5507
5508 /* Set of traversed objects. Used to avoid duplicate visits. */
5509 hash_set<tree> pset;
5510
5511 /* Array of symbols to process with free_lang_data_in_decl. */
5512 auto_vec<tree> decls;
5513
5514 /* Array of types to process with free_lang_data_in_type. */
5515 auto_vec<tree> types;
5516 };
5517
5518
5519 /* Save all language fields needed to generate proper debug information
5520 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5521
5522 static void
5523 save_debug_info_for_decl (tree t)
5524 {
5525 /*struct saved_debug_info_d *sdi;*/
5526
5527 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5528
5529 /* FIXME. Partial implementation for saving debug info removed. */
5530 }
5531
5532
5533 /* Save all language fields needed to generate proper debug information
5534 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5535
5536 static void
5537 save_debug_info_for_type (tree t)
5538 {
5539 /*struct saved_debug_info_d *sdi;*/
5540
5541 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5542
5543 /* FIXME. Partial implementation for saving debug info removed. */
5544 }
5545
5546
5547 /* Add type or decl T to one of the list of tree nodes that need their
5548 language data removed. The lists are held inside FLD. */
5549
5550 static void
5551 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5552 {
5553 if (DECL_P (t))
5554 {
5555 fld->decls.safe_push (t);
5556 if (debug_info_level > DINFO_LEVEL_TERSE)
5557 save_debug_info_for_decl (t);
5558 }
5559 else if (TYPE_P (t))
5560 {
5561 fld->types.safe_push (t);
5562 if (debug_info_level > DINFO_LEVEL_TERSE)
5563 save_debug_info_for_type (t);
5564 }
5565 else
5566 gcc_unreachable ();
5567 }
5568
5569 /* Push tree node T into FLD->WORKLIST. */
5570
5571 static inline void
5572 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5573 {
5574 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5575 fld->worklist.safe_push ((t));
5576 }
5577
5578
5579 /* Operand callback helper for free_lang_data_in_node. *TP is the
5580 subtree operand being considered. */
5581
5582 static tree
5583 find_decls_types_r (tree *tp, int *ws, void *data)
5584 {
5585 tree t = *tp;
5586 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5587
5588 if (TREE_CODE (t) == TREE_LIST)
5589 return NULL_TREE;
5590
5591 /* Language specific nodes will be removed, so there is no need
5592 to gather anything under them. */
5593 if (is_lang_specific (t))
5594 {
5595 *ws = 0;
5596 return NULL_TREE;
5597 }
5598
5599 if (DECL_P (t))
5600 {
5601 /* Note that walk_tree does not traverse every possible field in
5602 decls, so we have to do our own traversals here. */
5603 add_tree_to_fld_list (t, fld);
5604
5605 fld_worklist_push (DECL_NAME (t), fld);
5606 fld_worklist_push (DECL_CONTEXT (t), fld);
5607 fld_worklist_push (DECL_SIZE (t), fld);
5608 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5609
5610 /* We are going to remove everything under DECL_INITIAL for
5611 TYPE_DECLs. No point walking them. */
5612 if (TREE_CODE (t) != TYPE_DECL)
5613 fld_worklist_push (DECL_INITIAL (t), fld);
5614
5615 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5616 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5617
5618 if (TREE_CODE (t) == FUNCTION_DECL)
5619 {
5620 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5621 fld_worklist_push (DECL_RESULT (t), fld);
5622 }
5623 else if (TREE_CODE (t) == TYPE_DECL)
5624 {
5625 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5626 }
5627 else if (TREE_CODE (t) == FIELD_DECL)
5628 {
5629 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5630 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5631 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5632 fld_worklist_push (DECL_FCONTEXT (t), fld);
5633 }
5634
5635 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5636 && DECL_HAS_VALUE_EXPR_P (t))
5637 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5638
5639 if (TREE_CODE (t) != FIELD_DECL
5640 && TREE_CODE (t) != TYPE_DECL)
5641 fld_worklist_push (TREE_CHAIN (t), fld);
5642 *ws = 0;
5643 }
5644 else if (TYPE_P (t))
5645 {
5646 /* Note that walk_tree does not traverse every possible field in
5647 types, so we have to do our own traversals here. */
5648 add_tree_to_fld_list (t, fld);
5649
5650 if (!RECORD_OR_UNION_TYPE_P (t))
5651 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5652 fld_worklist_push (TYPE_SIZE (t), fld);
5653 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5654 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5655 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5656 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5657 fld_worklist_push (TYPE_NAME (t), fld);
5658 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5659 them and thus do not, and do not want to, reach unused pointer types
5660 this way. */
5661 if (!POINTER_TYPE_P (t))
5662 fld_worklist_push (TYPE_MINVAL (t), fld);
5663 if (!RECORD_OR_UNION_TYPE_P (t))
5664 fld_worklist_push (TYPE_MAXVAL (t), fld);
5665 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5666 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5667 do not, and do not want to, reach unused variants this way. */
5668 if (TYPE_CONTEXT (t))
5669 {
5670 tree ctx = TYPE_CONTEXT (t);
5671 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5672 So push that instead. */
5673 while (ctx && TREE_CODE (ctx) == BLOCK)
5674 ctx = BLOCK_SUPERCONTEXT (ctx);
5675 fld_worklist_push (ctx, fld);
5676 }
5677 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not,
5678 and do not want to, reach unused types this way. */
5679
5680 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5681 {
5682 unsigned i;
5683 tree tem;
5684 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5685 fld_worklist_push (TREE_TYPE (tem), fld);
5686 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5687 if (tem
5688 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5689 && TREE_CODE (tem) == TREE_LIST)
5690 do
5691 {
5692 fld_worklist_push (TREE_VALUE (tem), fld);
5693 tem = TREE_CHAIN (tem);
5694 }
5695 while (tem);
5696 }
5697 if (RECORD_OR_UNION_TYPE_P (t))
5698 {
5699 tree tem;
5700 /* Push all TYPE_FIELDS - there can be interleaving interesting
5701 and non-interesting things. */
5702 tem = TYPE_FIELDS (t);
5703 while (tem)
5704 {
5705 if (TREE_CODE (tem) == FIELD_DECL
5706 || (TREE_CODE (tem) == TYPE_DECL
5707 && !DECL_IGNORED_P (tem)
5708 && debug_info_level > DINFO_LEVEL_TERSE
5709 && !is_redundant_typedef (tem)))
5710 fld_worklist_push (tem, fld);
5711 tem = TREE_CHAIN (tem);
5712 }
5713 }
5714
5715 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5716 *ws = 0;
5717 }
5718 else if (TREE_CODE (t) == BLOCK)
5719 {
5720 tree tem;
5721 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5722 fld_worklist_push (tem, fld);
5723 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5724 fld_worklist_push (tem, fld);
5725 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5726 }
5727
5728 if (TREE_CODE (t) != IDENTIFIER_NODE
5729 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5730 fld_worklist_push (TREE_TYPE (t), fld);
5731
5732 return NULL_TREE;
5733 }
5734
5735
5736 /* Find decls and types in T. */
5737
5738 static void
5739 find_decls_types (tree t, struct free_lang_data_d *fld)
5740 {
5741 while (1)
5742 {
5743 if (!fld->pset.contains (t))
5744 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5745 if (fld->worklist.is_empty ())
5746 break;
5747 t = fld->worklist.pop ();
5748 }
5749 }
5750
5751 /* Translate all the types in LIST with the corresponding runtime
5752 types. */
5753
5754 static tree
5755 get_eh_types_for_runtime (tree list)
5756 {
5757 tree head, prev;
5758
5759 if (list == NULL_TREE)
5760 return NULL_TREE;
5761
5762 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5763 prev = head;
5764 list = TREE_CHAIN (list);
5765 while (list)
5766 {
5767 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5768 TREE_CHAIN (prev) = n;
5769 prev = TREE_CHAIN (prev);
5770 list = TREE_CHAIN (list);
5771 }
5772
5773 return head;
5774 }
5775
5776
5777 /* Find decls and types referenced in EH region R and store them in
5778 FLD->DECLS and FLD->TYPES. */
5779
5780 static void
5781 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5782 {
5783 switch (r->type)
5784 {
5785 case ERT_CLEANUP:
5786 break;
5787
5788 case ERT_TRY:
5789 {
5790 eh_catch c;
5791
5792 /* The types referenced in each catch must first be changed to the
5793 EH types used at runtime. This removes references to FE types
5794 in the region. */
5795 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5796 {
5797 c->type_list = get_eh_types_for_runtime (c->type_list);
5798 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5799 }
5800 }
5801 break;
5802
5803 case ERT_ALLOWED_EXCEPTIONS:
5804 r->u.allowed.type_list
5805 = get_eh_types_for_runtime (r->u.allowed.type_list);
5806 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5807 break;
5808
5809 case ERT_MUST_NOT_THROW:
5810 walk_tree (&r->u.must_not_throw.failure_decl,
5811 find_decls_types_r, fld, &fld->pset);
5812 break;
5813 }
5814 }
5815
5816
5817 /* Find decls and types referenced in cgraph node N and store them in
5818 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5819 look for *every* kind of DECL and TYPE node reachable from N,
5820 including those embedded inside types and decls (i.e., TYPE_DECLs,
5821 NAMESPACE_DECLs, etc). */
5822
5823 static void
5824 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5825 {
5826 basic_block bb;
5827 struct function *fn;
5828 unsigned ix;
5829 tree t;
5830
5831 find_decls_types (n->decl, fld);
5832
5833 if (!gimple_has_body_p (n->decl))
5834 return;
5835
5836 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5837
5838 fn = DECL_STRUCT_FUNCTION (n->decl);
5839
5840 /* Traverse locals. */
5841 FOR_EACH_LOCAL_DECL (fn, ix, t)
5842 find_decls_types (t, fld);
5843
5844 /* Traverse EH regions in FN. */
5845 {
5846 eh_region r;
5847 FOR_ALL_EH_REGION_FN (r, fn)
5848 find_decls_types_in_eh_region (r, fld);
5849 }
5850
5851 /* Traverse every statement in FN. */
5852 FOR_EACH_BB_FN (bb, fn)
5853 {
5854 gphi_iterator psi;
5855 gimple_stmt_iterator si;
5856 unsigned i;
5857
5858 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5859 {
5860 gphi *phi = psi.phi ();
5861
5862 for (i = 0; i < gimple_phi_num_args (phi); i++)
5863 {
5864 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5865 find_decls_types (*arg_p, fld);
5866 }
5867 }
5868
5869 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5870 {
5871 gimple *stmt = gsi_stmt (si);
5872
5873 if (is_gimple_call (stmt))
5874 find_decls_types (gimple_call_fntype (stmt), fld);
5875
5876 for (i = 0; i < gimple_num_ops (stmt); i++)
5877 {
5878 tree arg = gimple_op (stmt, i);
5879 find_decls_types (arg, fld);
5880 }
5881 }
5882 }
5883 }
5884
5885
5886 /* Find decls and types referenced in varpool node N and store them in
5887 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5888 look for *every* kind of DECL and TYPE node reachable from N,
5889 including those embedded inside types and decls (i.e., TYPE_DECLs,
5890 NAMESPACE_DECLs, etc). */
5891
5892 static void
5893 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5894 {
5895 find_decls_types (v->decl, fld);
5896 }
5897
5898 /* If T needs an assembler name, have one created for it. */
5899
5900 void
5901 assign_assembler_name_if_neeeded (tree t)
5902 {
5903 if (need_assembler_name_p (t))
5904 {
5905 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5906 diagnostics that use input_location to show locus
5907 information. The problem here is that, at this point,
5908 input_location is generally anchored to the end of the file
5909 (since the parser is long gone), so we don't have a good
5910 position to pin it to.
5911
5912 To alleviate this problem, this uses the location of T's
5913 declaration. Examples of this are
5914 testsuite/g++.dg/template/cond2.C and
5915 testsuite/g++.dg/template/pr35240.C. */
5916 location_t saved_location = input_location;
5917 input_location = DECL_SOURCE_LOCATION (t);
5918
5919 decl_assembler_name (t);
5920
5921 input_location = saved_location;
5922 }
5923 }
5924
5925
5926 /* Free language specific information for every operand and expression
5927 in every node of the call graph. This process operates in three stages:
5928
5929 1- Every callgraph node and varpool node is traversed looking for
5930 decls and types embedded in them. This is a more exhaustive
5931 search than that done by find_referenced_vars, because it will
5932 also collect individual fields, decls embedded in types, etc.
5933
5934 2- All the decls found are sent to free_lang_data_in_decl.
5935
5936 3- All the types found are sent to free_lang_data_in_type.
5937
5938 The ordering between decls and types is important because
5939 free_lang_data_in_decl sets assembler names, which includes
5940 mangling. So types cannot be freed up until assembler names have
5941 been set up. */
5942
5943 static void
5944 free_lang_data_in_cgraph (void)
5945 {
5946 struct cgraph_node *n;
5947 varpool_node *v;
5948 struct free_lang_data_d fld;
5949 tree t;
5950 unsigned i;
5951 alias_pair *p;
5952
5953 /* Find decls and types in the body of every function in the callgraph. */
5954 FOR_EACH_FUNCTION (n)
5955 find_decls_types_in_node (n, &fld);
5956
5957 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5958 find_decls_types (p->decl, &fld);
5959
5960 /* Find decls and types in every varpool symbol. */
5961 FOR_EACH_VARIABLE (v)
5962 find_decls_types_in_var (v, &fld);
5963
5964 /* Set the assembler name on every decl found. We need to do this
5965 now because free_lang_data_in_decl will invalidate data needed
5966 for mangling; doing it later would break mangling of interdependent decls. */
5967 FOR_EACH_VEC_ELT (fld.decls, i, t)
5968 assign_assembler_name_if_neeeded (t);
5969
5970 /* Traverse every decl found freeing its language data. */
5971 FOR_EACH_VEC_ELT (fld.decls, i, t)
5972 free_lang_data_in_decl (t);
5973
5974 /* Traverse every type found freeing its language data. */
5975 FOR_EACH_VEC_ELT (fld.types, i, t)
5976 free_lang_data_in_type (t);
5977 if (flag_checking)
5978 {
5979 FOR_EACH_VEC_ELT (fld.types, i, t)
5980 verify_type (t);
5981 }
5982 }
5983
5984
5985 /* Free resources that are used by the FE but are not needed once it is done. */
5986
5987 static unsigned
5988 free_lang_data (void)
5989 {
5990 unsigned i;
5991
5992 /* If we are the LTO frontend we have freed lang-specific data already. */
5993 if (in_lto_p
5994 || (!flag_generate_lto && !flag_generate_offload))
5995 return 0;
5996
5997 /* Allocate and assign alias sets to the standard integer types
5998 while the slots are still set up the way the frontends generated them. */
5999 for (i = 0; i < itk_none; ++i)
6000 if (integer_types[i])
6001 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6002
6003 /* Traverse the IL resetting language specific information for
6004 operands, expressions, etc. */
6005 free_lang_data_in_cgraph ();
6006
6007 /* Create gimple variants for common types. */
6008 ptrdiff_type_node = integer_type_node;
6009 fileptr_type_node = ptr_type_node;
6010
6011 /* Reset some langhooks. Do not reset types_compatible_p, it may
6012 still be used indirectly via the get_alias_set langhook. */
6013 lang_hooks.dwarf_name = lhd_dwarf_name;
6014 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6015 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6016
6017 /* We do not want the default decl_assembler_name implementation,
6018 rather if we have fixed everything we want a wrapper around it
6019 asserting that all non-local symbols already got their assembler
6020 name and only produce assembler names for local symbols. Or rather
6021 make sure we never call decl_assembler_name on local symbols and
6022 devise a separate, middle-end private scheme for it. */
6023
6024 /* Reset diagnostic machinery. */
6025 tree_diagnostics_defaults (global_dc);
6026
6027 return 0;
6028 }
6029
6030
6031 namespace {
6032
6033 const pass_data pass_data_ipa_free_lang_data =
6034 {
6035 SIMPLE_IPA_PASS, /* type */
6036 "*free_lang_data", /* name */
6037 OPTGROUP_NONE, /* optinfo_flags */
6038 TV_IPA_FREE_LANG_DATA, /* tv_id */
6039 0, /* properties_required */
6040 0, /* properties_provided */
6041 0, /* properties_destroyed */
6042 0, /* todo_flags_start */
6043 0, /* todo_flags_finish */
6044 };
6045
6046 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6047 {
6048 public:
6049 pass_ipa_free_lang_data (gcc::context *ctxt)
6050 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6051 {}
6052
6053 /* opt_pass methods: */
6054 virtual unsigned int execute (function *) { return free_lang_data (); }
6055
6056 }; // class pass_ipa_free_lang_data
6057
6058 } // anon namespace
6059
6060 simple_ipa_opt_pass *
6061 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6062 {
6063 return new pass_ipa_free_lang_data (ctxt);
6064 }
6065
6066 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6067 ATTR_NAME. Also used internally by remove_attribute(). */
6068 bool
6069 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6070 {
6071 size_t ident_len = IDENTIFIER_LENGTH (ident);
6072
6073 if (ident_len == attr_len)
6074 {
6075 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6076 return true;
6077 }
6078 else if (ident_len == attr_len + 4)
6079 {
6080 /* There is the possibility that ATTR is 'text' and IDENT is
6081 '__text__'. */
6082 const char *p = IDENTIFIER_POINTER (ident);
6083 if (p[0] == '_' && p[1] == '_'
6084 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6085 && strncmp (attr_name, p + 2, attr_len) == 0)
6086 return true;
6087 }
6088
6089 return false;
6090 }
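/* For illustration, a hedged sketch of how this is reached through the
   is_attribute_p wrapper from tree.h: both the plain and the underscored
   spelling of a name match the same attribute, e.g.

     is_attribute_p ("packed", get_identifier ("packed"))      -> true
     is_attribute_p ("packed", get_identifier ("__packed__"))  -> true
     is_attribute_p ("packed", get_identifier ("aligned"))     -> false

   is_attribute_p simply passes strlen (attr_name) down to
   private_is_attribute_p.  */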
6091
6092 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6093 of ATTR_NAME, and LIST is not NULL_TREE. */
6094 tree
6095 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6096 {
6097 while (list)
6098 {
6099 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6100
6101 if (ident_len == attr_len)
6102 {
6103 if (!strcmp (attr_name,
6104 IDENTIFIER_POINTER (get_attribute_name (list))))
6105 break;
6106 }
6107 /* TODO: If we made sure that attributes were stored in the
6108 canonical form without '__...__' (ie, as in 'text' as opposed
6109 to '__text__') then we could avoid the following case. */
6110 else if (ident_len == attr_len + 4)
6111 {
6112 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6113 if (p[0] == '_' && p[1] == '_'
6114 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6115 && strncmp (attr_name, p + 2, attr_len) == 0)
6116 break;
6117 }
6118 list = TREE_CHAIN (list);
6119 }
6120
6121 return list;
6122 }
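/* A sketch of typical use of the public lookup_attribute wrapper, which
   calls this helper when LIST is non-empty:

     if (lookup_attribute ("noreturn", DECL_ATTRIBUTES (fndecl)))
       ...;

   The return value is the matching TREE_LIST node itself, so TREE_VALUE
   of it gives the attribute's arguments, and passing TREE_CHAIN of it
   back in finds further occurrences.  */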
6123
6124 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6125 return the first element of the list whose attribute name starts
6126 with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6127 '__text__'). */
6128
6129 tree
6130 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6131 tree list)
6132 {
6133 while (list)
6134 {
6135 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6136
6137 if (attr_len > ident_len)
6138 {
6139 list = TREE_CHAIN (list);
6140 continue;
6141 }
6142
6143 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6144
6145 if (strncmp (attr_name, p, attr_len) == 0)
6146 break;
6147
6148 /* TODO: If we made sure that attributes were stored in the
6149 canonical form without '__...__' (ie, as in 'text' as opposed
6150 to '__text__') then we could avoid the following case. */
6151 if (p[0] == '_' && p[1] == '_'
6152 && strncmp (attr_name, p + 2, attr_len) == 0)
6153 break;
6154
6155 list = TREE_CHAIN (list);
6156 }
6157
6158 return list;
6159 }
6160
6161
6162 /* A variant of lookup_attribute() that can be used with an identifier
6163 as the first argument, and where the identifier can be either
6164 'text' or '__text__'.
6165
6166 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6167 return a pointer to the attribute's list element if the attribute
6168 is part of the list, or NULL_TREE if not found. If the attribute
6169 appears more than once, this only returns the first occurrence; the
6170 TREE_CHAIN of the return value should be passed back in if further
6171 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6172 can be in the form 'text' or '__text__'. */
6173 static tree
6174 lookup_ident_attribute (tree attr_identifier, tree list)
6175 {
6176 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6177
6178 while (list)
6179 {
6180 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6181 == IDENTIFIER_NODE);
6182
6183 if (cmp_attrib_identifiers (attr_identifier,
6184 get_attribute_name (list)))
6185 /* Found it. */
6186 break;
6187 list = TREE_CHAIN (list);
6188 }
6189
6190 return list;
6191 }
6192
6193 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6194 modified list. */
6195
6196 tree
6197 remove_attribute (const char *attr_name, tree list)
6198 {
6199 tree *p;
6200 size_t attr_len = strlen (attr_name);
6201
6202 gcc_checking_assert (attr_name[0] != '_');
6203
6204 for (p = &list; *p; )
6205 {
6206 tree l = *p;
6207 /* TODO: If we were storing attributes in normalized form, here
6208 we could use a simple strcmp(). */
6209 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6210 *p = TREE_CHAIN (l);
6211 else
6212 p = &TREE_CHAIN (l);
6213 }
6214
6215 return list;
6216 }
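/* For example, merge_dllimport_decl_attributes below uses this to drop a
   stale attribute; a caller updating a decl would write something like

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("dllimport", DECL_ATTRIBUTES (decl));

   Note the checking assert above: the name must be given in the canonical
   'text' form, not '__text__'.  */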
6217
6218 /* Return an attribute list that is the union of a1 and a2. */
6219
6220 tree
6221 merge_attributes (tree a1, tree a2)
6222 {
6223 tree attributes;
6224
6225 /* Either one unset? Take the set one. */
6226
6227 if ((attributes = a1) == 0)
6228 attributes = a2;
6229
6230 /* One that completely contains the other? Take it. */
6231
6232 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6233 {
6234 if (attribute_list_contained (a2, a1))
6235 attributes = a2;
6236 else
6237 {
6238 /* Pick the longest list, and hang on the other list. */
6239
6240 if (list_length (a1) < list_length (a2))
6241 attributes = a2, a2 = a1;
6242
6243 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6244 {
6245 tree a;
6246 for (a = lookup_ident_attribute (get_attribute_name (a2),
6247 attributes);
6248 a != NULL_TREE && !attribute_value_equal (a, a2);
6249 a = lookup_ident_attribute (get_attribute_name (a2),
6250 TREE_CHAIN (a)))
6251 ;
6252 if (a == NULL_TREE)
6253 {
6254 a1 = copy_node (a2);
6255 TREE_CHAIN (a1) = attributes;
6256 attributes = a1;
6257 }
6258 }
6259 }
6260 }
6261 return attributes;
6262 }
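/* A small worked example: merging { aligned(8), packed } with
   { packed, noreturn } yields a list with aligned(8), packed and noreturn
   exactly once each; attribute_value_equal is what decides that the two
   'packed' entries are duplicates, so attributes with differing arguments
   are kept separately.  */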
6263
6264 /* Given types T1 and T2, merge their attributes and return
6265 the result. */
6266
6267 tree
6268 merge_type_attributes (tree t1, tree t2)
6269 {
6270 return merge_attributes (TYPE_ATTRIBUTES (t1),
6271 TYPE_ATTRIBUTES (t2));
6272 }
6273
6274 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6275 the result. */
6276
6277 tree
6278 merge_decl_attributes (tree olddecl, tree newdecl)
6279 {
6280 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6281 DECL_ATTRIBUTES (newdecl));
6282 }
6283
6284 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6285
6286 /* Specialization of merge_decl_attributes for various Windows targets.
6287
6288 This handles the following situation:
6289
6290 __declspec (dllimport) int foo;
6291 int foo;
6292
6293 The second instance of `foo' nullifies the dllimport. */
6294
6295 tree
6296 merge_dllimport_decl_attributes (tree old, tree new_tree)
6297 {
6298 tree a;
6299 int delete_dllimport_p = 1;
6300
6301 /* What we need to do here is remove from `old' dllimport if it doesn't
6302 appear in `new'. dllimport behaves like extern: if a declaration is
6303 marked dllimport and a definition appears later, then the object
6304 is not dllimport'd. We also remove a `new' dllimport if the old list
6305 contains dllexport: dllexport always overrides dllimport, regardless
6306 of the order of declaration. */
6307 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6308 delete_dllimport_p = 0;
6309 else if (DECL_DLLIMPORT_P (new_tree)
6310 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6311 {
6312 DECL_DLLIMPORT_P (new_tree) = 0;
6313 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6314 "dllimport ignored", new_tree);
6315 }
6316 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6317 {
6318 /* Warn about overriding a symbol that has already been used, e.g.:
6319 extern int __attribute__ ((dllimport)) foo;
6320 int* bar () {return &foo;}
6321 int foo;
6322 */
6323 if (TREE_USED (old))
6324 {
6325 warning (0, "%q+D redeclared without dllimport attribute "
6326 "after being referenced with dll linkage", new_tree);
6327 /* If we have used a variable's address with dllimport linkage,
6328 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6329 decl may already have had TREE_CONSTANT computed.
6330 We still remove the attribute so that assembler code refers
6331 to '&foo' rather than '_imp__foo'. */
6332 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6333 DECL_DLLIMPORT_P (new_tree) = 1;
6334 }
6335
6336 /* Let an inline definition silently override the external reference,
6337 but otherwise warn about attribute inconsistency. */
6338 else if (TREE_CODE (new_tree) == VAR_DECL
6339 || !DECL_DECLARED_INLINE_P (new_tree))
6340 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6341 "previous dllimport ignored", new_tree);
6342 }
6343 else
6344 delete_dllimport_p = 0;
6345
6346 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6347
6348 if (delete_dllimport_p)
6349 a = remove_attribute ("dllimport", a);
6350
6351 return a;
6352 }
6353
6354 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6355 struct attribute_spec.handler. */
6356
6357 tree
6358 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6359 bool *no_add_attrs)
6360 {
6361 tree node = *pnode;
6362 bool is_dllimport;
6363
6364 /* These attributes may apply to structure and union types being created,
6365 but otherwise should pass to the declaration involved. */
6366 if (!DECL_P (node))
6367 {
6368 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6369 | (int) ATTR_FLAG_ARRAY_NEXT))
6370 {
6371 *no_add_attrs = true;
6372 return tree_cons (name, args, NULL_TREE);
6373 }
6374 if (TREE_CODE (node) == RECORD_TYPE
6375 || TREE_CODE (node) == UNION_TYPE)
6376 {
6377 node = TYPE_NAME (node);
6378 if (!node)
6379 return NULL_TREE;
6380 }
6381 else
6382 {
6383 warning (OPT_Wattributes, "%qE attribute ignored",
6384 name);
6385 *no_add_attrs = true;
6386 return NULL_TREE;
6387 }
6388 }
6389
6390 if (TREE_CODE (node) != FUNCTION_DECL
6391 && TREE_CODE (node) != VAR_DECL
6392 && TREE_CODE (node) != TYPE_DECL)
6393 {
6394 *no_add_attrs = true;
6395 warning (OPT_Wattributes, "%qE attribute ignored",
6396 name);
6397 return NULL_TREE;
6398 }
6399
6400 if (TREE_CODE (node) == TYPE_DECL
6401 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6402 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6403 {
6404 *no_add_attrs = true;
6405 warning (OPT_Wattributes, "%qE attribute ignored",
6406 name);
6407 return NULL_TREE;
6408 }
6409
6410 is_dllimport = is_attribute_p ("dllimport", name);
6411
6412 /* Report error on dllimport ambiguities seen now before they cause
6413 any damage. */
6414 if (is_dllimport)
6415 {
6416 /* Honor any target-specific overrides. */
6417 if (!targetm.valid_dllimport_attribute_p (node))
6418 *no_add_attrs = true;
6419
6420 else if (TREE_CODE (node) == FUNCTION_DECL
6421 && DECL_DECLARED_INLINE_P (node))
6422 {
6423 warning (OPT_Wattributes, "inline function %q+D declared as "
6424 " dllimport: attribute ignored", node);
6425 *no_add_attrs = true;
6426 }
6427 /* Like MS, treat definition of dllimported variables and
6428 non-inlined functions on declaration as syntax errors. */
6429 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6430 {
6431 error ("function %q+D definition is marked dllimport", node);
6432 *no_add_attrs = true;
6433 }
6434
6435 else if (TREE_CODE (node) == VAR_DECL)
6436 {
6437 if (DECL_INITIAL (node))
6438 {
6439 error ("variable %q+D definition is marked dllimport",
6440 node);
6441 *no_add_attrs = true;
6442 }
6443
6444 /* `extern' needn't be specified with dllimport.
6445 Specify `extern' now and hope for the best. Sigh. */
6446 DECL_EXTERNAL (node) = 1;
6447 /* Also, implicitly give global scope to dllimport'd variables
6448 declared within a function, unless they are declared static. */
6449 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6450 TREE_PUBLIC (node) = 1;
6451 }
6452
6453 if (*no_add_attrs == false)
6454 DECL_DLLIMPORT_P (node) = 1;
6455 }
6456 else if (TREE_CODE (node) == FUNCTION_DECL
6457 && DECL_DECLARED_INLINE_P (node)
6458 && flag_keep_inline_dllexport)
6459 /* An exported function, even if inline, must be emitted. */
6460 DECL_EXTERNAL (node) = 0;
6461
6462 /* Report error if symbol is not accessible at global scope. */
6463 if (!TREE_PUBLIC (node)
6464 && (TREE_CODE (node) == VAR_DECL
6465 || TREE_CODE (node) == FUNCTION_DECL))
6466 {
6467 error ("external linkage required for symbol %q+D because of "
6468 "%qE attribute", node, name);
6469 *no_add_attrs = true;
6470 }
6471
6472 /* A dllexport'd entity must have default visibility so that other
6473 program units (shared libraries or the main executable) can see
6474 it. A dllimport'd entity must have default visibility so that
6475 the linker knows that undefined references within this program
6476 unit can be resolved by the dynamic linker. */
6477 if (!*no_add_attrs)
6478 {
6479 if (DECL_VISIBILITY_SPECIFIED (node)
6480 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6481 error ("%qE implies default visibility, but %qD has already "
6482 "been declared with a different visibility",
6483 name, node);
6484 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6485 DECL_VISIBILITY_SPECIFIED (node) = 1;
6486 }
6487
6488 return NULL_TREE;
6489 }
6490
6491 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6492 \f
6493 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6494 of the various TYPE_QUAL values. */
6495
6496 static void
6497 set_type_quals (tree type, int type_quals)
6498 {
6499 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6500 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6501 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6502 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6503 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6504 }
6505
6506 /* Returns true iff unqualified CAND and BASE are equivalent. */
6507
6508 bool
6509 check_base_type (const_tree cand, const_tree base)
6510 {
6511 return (TYPE_NAME (cand) == TYPE_NAME (base)
6512 /* Apparently this is needed for Objective-C. */
6513 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6514 /* Check alignment. */
6515 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6516 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6517 TYPE_ATTRIBUTES (base)));
6518 }
6519
6520 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6521
6522 bool
6523 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6524 {
6525 return (TYPE_QUALS (cand) == type_quals
6526 && check_base_type (cand, base));
6527 }
6528
6529 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6530
6531 static bool
6532 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6533 {
6534 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6535 && TYPE_NAME (cand) == TYPE_NAME (base)
6536 /* Apparently this is needed for Objective-C. */
6537 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6538 /* Check alignment. */
6539 && TYPE_ALIGN (cand) == align
6540 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6541 TYPE_ATTRIBUTES (base)));
6542 }
6543
6544 /* Check whether TYPE matches the size of one of the built-in
6545 atomic types, and if so return that core atomic type. */
6546
6547 static tree
6548 find_atomic_core_type (tree type)
6549 {
6550 tree base_atomic_type;
6551
6552 /* Only handle complete types. */
6553 if (TYPE_SIZE (type) == NULL_TREE)
6554 return NULL_TREE;
6555
6556 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6557 switch (type_size)
6558 {
6559 case 8:
6560 base_atomic_type = atomicQI_type_node;
6561 break;
6562
6563 case 16:
6564 base_atomic_type = atomicHI_type_node;
6565 break;
6566
6567 case 32:
6568 base_atomic_type = atomicSI_type_node;
6569 break;
6570
6571 case 64:
6572 base_atomic_type = atomicDI_type_node;
6573 break;
6574
6575 case 128:
6576 base_atomic_type = atomicTI_type_node;
6577 break;
6578
6579 default:
6580 base_atomic_type = NULL_TREE;
6581 }
6582
6583 return base_atomic_type;
6584 }
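/* For instance, an _Atomic int (32 bits on the usual targets) maps to
   atomicSI_type_node and an _Atomic long long (64 bits) to
   atomicDI_type_node; an incomplete or oddly sized type yields NULL_TREE,
   in which case build_qualified_type below leaves the alignment alone.  */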
6585
6586 /* Return a version of the TYPE, qualified as indicated by the
6587 TYPE_QUALS, if one exists. If no qualified version exists yet,
6588 return NULL_TREE. */
6589
6590 tree
6591 get_qualified_type (tree type, int type_quals)
6592 {
6593 tree t;
6594
6595 if (TYPE_QUALS (type) == type_quals)
6596 return type;
6597
6598 /* Search the chain of variants to see if there is already one there just
6599 like the one we need to have. If so, use that existing one. We must
6600 preserve the TYPE_NAME, since there is code that depends on this. */
6601 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6602 if (check_qualified_type (t, type, type_quals))
6603 return t;
6604
6605 return NULL_TREE;
6606 }
6607
6608 /* Like get_qualified_type, but creates the type if it does not
6609 exist. This function never returns NULL_TREE. */
6610
6611 tree
6612 build_qualified_type (tree type, int type_quals)
6613 {
6614 tree t;
6615
6616 /* See if we already have the appropriate qualified variant. */
6617 t = get_qualified_type (type, type_quals);
6618
6619 /* If not, build it. */
6620 if (!t)
6621 {
6622 t = build_variant_type_copy (type);
6623 set_type_quals (t, type_quals);
6624
6625 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6626 {
6627 /* See if this object can map to a basic atomic type. */
6628 tree atomic_type = find_atomic_core_type (type);
6629 if (atomic_type)
6630 {
6631 /* Ensure the alignment of this type is compatible with
6632 the required alignment of the atomic type. */
6633 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6634 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6635 }
6636 }
6637
6638 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6639 /* Propagate structural equality. */
6640 SET_TYPE_STRUCTURAL_EQUALITY (t);
6641 else if (TYPE_CANONICAL (type) != type)
6642 /* Build the underlying canonical type, since it is different
6643 from TYPE. */
6644 {
6645 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6646 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6647 }
6648 else
6649 /* T is its own canonical type. */
6650 TYPE_CANONICAL (t) = t;
6651
6652 }
6653
6654 return t;
6655 }
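/* A sketch of typical use, building a 'const volatile' variant:

     tree cv = build_qualified_type (type,
                                     TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   The result is shared: a second call with the same arguments finds the
   existing variant via get_qualified_type instead of creating a new
   node.  */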
6656
6657 /* Create a variant of TYPE with alignment ALIGN. */
6658
6659 tree
6660 build_aligned_type (tree type, unsigned int align)
6661 {
6662 tree t;
6663
6664 if (TYPE_PACKED (type)
6665 || TYPE_ALIGN (type) == align)
6666 return type;
6667
6668 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6669 if (check_aligned_type (t, type, align))
6670 return t;
6671
6672 t = build_variant_type_copy (type);
6673 SET_TYPE_ALIGN (t, align);
6674
6675 return t;
6676 }
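/* For example, expanding an 'aligned (16)' attribute would ask for

     tree t16 = build_aligned_type (type, 128);

   ALIGN is in bits, so 128 means 16 bytes; packed types and types that
   already have the requested alignment are returned unchanged.  */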
6677
6678 /* Create a new distinct copy of TYPE. The new type is made its own
6679 MAIN_VARIANT. If TYPE requires structural equality checks, the
6680 resulting type requires structural equality checks; otherwise, its
6681 TYPE_CANONICAL points to itself. */
6682
6683 tree
6684 build_distinct_type_copy (tree type)
6685 {
6686 tree t = copy_node (type);
6687
6688 TYPE_POINTER_TO (t) = 0;
6689 TYPE_REFERENCE_TO (t) = 0;
6690
6691 /* Set the canonical type either to a new equivalence class, or
6692 propagate the need for structural equality checks. */
6693 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6694 SET_TYPE_STRUCTURAL_EQUALITY (t);
6695 else
6696 TYPE_CANONICAL (t) = t;
6697
6698 /* Make it its own variant. */
6699 TYPE_MAIN_VARIANT (t) = t;
6700 TYPE_NEXT_VARIANT (t) = 0;
6701
6702 /* We do not record methods in type copies or variants,
6703 so we do not need to keep them up to date when a new method
6704 is inserted. */
6705 if (RECORD_OR_UNION_TYPE_P (t))
6706 TYPE_METHODS (t) = NULL_TREE;
6707
6708 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6709 whose TREE_TYPE is not t. This can also happen in the Ada
6710 frontend when using subtypes. */
6711
6712 return t;
6713 }
6714
6715 /* Create a new variant of TYPE, equivalent but distinct. This is so
6716 the caller can modify it. TYPE_CANONICAL for the return type will
6717 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6718 are considered equal by the language itself (or that both types
6719 require structural equality checks). */
6720
6721 tree
6722 build_variant_type_copy (tree type)
6723 {
6724 tree t, m = TYPE_MAIN_VARIANT (type);
6725
6726 t = build_distinct_type_copy (type);
6727
6728 /* Since we're building a variant, assume that it is a non-semantic
6729 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6730 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6731 /* Type variants have no alias set defined. */
6732 TYPE_ALIAS_SET (t) = -1;
6733
6734 /* Add the new type to the chain of variants of TYPE. */
6735 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6736 TYPE_NEXT_VARIANT (m) = t;
6737 TYPE_MAIN_VARIANT (t) = m;
6738
6739 return t;
6740 }
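/* In short: build_distinct_type_copy creates a new main variant, a
   genuinely different type with its own canonical type, while
   build_variant_type_copy creates another variant of the same type, which
   is what build_qualified_type and build_aligned_type above rely on.  */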
6741 \f
6742 /* Return true if the "from" trees in both tree maps are equal. */
6743
6744 int
6745 tree_map_base_eq (const void *va, const void *vb)
6746 {
6747 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6748 *const b = (const struct tree_map_base *) vb;
6749 return (a->from == b->from);
6750 }
6751
6752 /* Hash the "from" tree in a tree_map_base. */
6753
6754 unsigned int
6755 tree_map_base_hash (const void *item)
6756 {
6757 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6758 }
6759
6760 /* Return true if this tree map structure is marked for garbage collection
6761 purposes. We simply return true if the from tree is marked, so that this
6762 structure goes away when the from tree goes away. */
6763
6764 int
6765 tree_map_base_marked_p (const void *p)
6766 {
6767 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6768 }
6769
6770 /* Hash a from tree in a tree_map. */
6771
6772 unsigned int
6773 tree_map_hash (const void *item)
6774 {
6775 return (((const struct tree_map *) item)->hash);
6776 }
6777
6778 /* Hash a from tree in a tree_decl_map. */
6779
6780 unsigned int
6781 tree_decl_map_hash (const void *item)
6782 {
6783 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6784 }
6785
6786 /* Return the initialization priority for DECL. */
6787
6788 priority_type
6789 decl_init_priority_lookup (tree decl)
6790 {
6791 symtab_node *snode = symtab_node::get (decl);
6792
6793 if (!snode)
6794 return DEFAULT_INIT_PRIORITY;
6795 return
6796 snode->get_init_priority ();
6797 }
6798
6799 /* Return the finalization priority for DECL. */
6800
6801 priority_type
6802 decl_fini_priority_lookup (tree decl)
6803 {
6804 cgraph_node *node = cgraph_node::get (decl);
6805
6806 if (!node)
6807 return DEFAULT_INIT_PRIORITY;
6808 return
6809 node->get_fini_priority ();
6810 }
6811
6812 /* Set the initialization priority for DECL to PRIORITY. */
6813
6814 void
6815 decl_init_priority_insert (tree decl, priority_type priority)
6816 {
6817 struct symtab_node *snode;
6818
6819 if (priority == DEFAULT_INIT_PRIORITY)
6820 {
6821 snode = symtab_node::get (decl);
6822 if (!snode)
6823 return;
6824 }
6825 else if (TREE_CODE (decl) == VAR_DECL)
6826 snode = varpool_node::get_create (decl);
6827 else
6828 snode = cgraph_node::get_create (decl);
6829 snode->set_init_priority (priority);
6830 }
6831
6832 /* Set the finalization priority for DECL to PRIORITY. */
6833
6834 void
6835 decl_fini_priority_insert (tree decl, priority_type priority)
6836 {
6837 struct cgraph_node *node;
6838
6839 if (priority == DEFAULT_INIT_PRIORITY)
6840 {
6841 node = cgraph_node::get (decl);
6842 if (!node)
6843 return;
6844 }
6845 else
6846 node = cgraph_node::get_create (decl);
6847 node->set_fini_priority (priority);
6848 }
6849
6850 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6851
6852 static void
6853 print_debug_expr_statistics (void)
6854 {
6855 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6856 (long) debug_expr_for_decl->size (),
6857 (long) debug_expr_for_decl->elements (),
6858 debug_expr_for_decl->collisions ());
6859 }
6860
6861 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6862
6863 static void
6864 print_value_expr_statistics (void)
6865 {
6866 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6867 (long) value_expr_for_decl->size (),
6868 (long) value_expr_for_decl->elements (),
6869 value_expr_for_decl->collisions ());
6870 }
6871
6872 /* Lookup a debug expression for FROM, and return it if we find one. */
6873
6874 tree
6875 decl_debug_expr_lookup (tree from)
6876 {
6877 struct tree_decl_map *h, in;
6878 in.base.from = from;
6879
6880 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6881 if (h)
6882 return h->to;
6883 return NULL_TREE;
6884 }
6885
6886 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6887
6888 void
6889 decl_debug_expr_insert (tree from, tree to)
6890 {
6891 struct tree_decl_map *h;
6892
6893 h = ggc_alloc<tree_decl_map> ();
6894 h->base.from = from;
6895 h->to = to;
6896 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6897 }
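/* These two routines are used as a pair; a pass that splits or replaces a
   variable can record where its value now lives,

     decl_debug_expr_insert (var, expr);

   and debug-info generation later reads it back with
   decl_debug_expr_lookup (var), normally through the
   DECL_DEBUG_EXPR / SET_DECL_DEBUG_EXPR accessors in tree.h.  */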
6898
6899 /* Lookup a value expression for FROM, and return it if we find one. */
6900
6901 tree
6902 decl_value_expr_lookup (tree from)
6903 {
6904 struct tree_decl_map *h, in;
6905 in.base.from = from;
6906
6907 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6908 if (h)
6909 return h->to;
6910 return NULL_TREE;
6911 }
6912
6913 /* Insert a mapping FROM->TO in the value expression hashtable. */
6914
6915 void
6916 decl_value_expr_insert (tree from, tree to)
6917 {
6918 struct tree_decl_map *h;
6919
6920 h = ggc_alloc<tree_decl_map> ();
6921 h->base.from = from;
6922 h->to = to;
6923 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6924 }
6925
6926 /* Lookup a vector of debug arguments for FROM, and return it if we
6927 find one. */
6928
6929 vec<tree, va_gc> **
6930 decl_debug_args_lookup (tree from)
6931 {
6932 struct tree_vec_map *h, in;
6933
6934 if (!DECL_HAS_DEBUG_ARGS_P (from))
6935 return NULL;
6936 gcc_checking_assert (debug_args_for_decl != NULL);
6937 in.base.from = from;
6938 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6939 if (h)
6940 return &h->to;
6941 return NULL;
6942 }
6943
6944 /* Insert a mapping FROM->empty vector of debug arguments in the
6945 debug arguments hashtable. */
6946
6947 vec<tree, va_gc> **
6948 decl_debug_args_insert (tree from)
6949 {
6950 struct tree_vec_map *h;
6951 tree_vec_map **loc;
6952
6953 if (DECL_HAS_DEBUG_ARGS_P (from))
6954 return decl_debug_args_lookup (from);
6955 if (debug_args_for_decl == NULL)
6956 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6957 h = ggc_alloc<tree_vec_map> ();
6958 h->base.from = from;
6959 h->to = NULL;
6960 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6961 *loc = h;
6962 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6963 return &h->to;
6964 }
6965
6966 /* Hashing of types so that we don't make duplicates.
6967 The entry point is `type_hash_canon'. */
6968
6969 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6970 with types in the TREE_VALUE slots), by adding the hash codes
6971 of the individual types. */
6972
6973 static void
6974 type_hash_list (const_tree list, inchash::hash &hstate)
6975 {
6976 const_tree tail;
6977
6978 for (tail = list; tail; tail = TREE_CHAIN (tail))
6979 if (TREE_VALUE (tail) != error_mark_node)
6980 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6981 }
6982
6983 /* These are the Hashtable callback functions. */
6984
6985 /* Returns true iff the types are equivalent. */
6986
6987 bool
6988 type_cache_hasher::equal (type_hash *a, type_hash *b)
6989 {
6990 /* First test the things that are the same for all types. */
6991 if (a->hash != b->hash
6992 || TREE_CODE (a->type) != TREE_CODE (b->type)
6993 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6994 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6995 TYPE_ATTRIBUTES (b->type))
6996 || (TREE_CODE (a->type) != COMPLEX_TYPE
6997 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6998 return 0;
6999
7000 /* Be careful about comparing arrays before and after the element type
7001 has been completed; don't compare TYPE_ALIGN unless both types are
7002 complete. */
7003 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7004 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7005 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7006 return 0;
7007
7008 switch (TREE_CODE (a->type))
7009 {
7010 case VOID_TYPE:
7011 case COMPLEX_TYPE:
7012 case POINTER_TYPE:
7013 case REFERENCE_TYPE:
7014 case NULLPTR_TYPE:
7015 return 1;
7016
7017 case VECTOR_TYPE:
7018 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7019
7020 case ENUMERAL_TYPE:
7021 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7022 && !(TYPE_VALUES (a->type)
7023 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7024 && TYPE_VALUES (b->type)
7025 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7026 && type_list_equal (TYPE_VALUES (a->type),
7027 TYPE_VALUES (b->type))))
7028 return 0;
7029
7030 /* ... fall through ... */
7031
7032 case INTEGER_TYPE:
7033 case REAL_TYPE:
7034 case BOOLEAN_TYPE:
7035 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7036 return false;
7037 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7038 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7039 TYPE_MAX_VALUE (b->type)))
7040 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7041 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7042 TYPE_MIN_VALUE (b->type))));
7043
7044 case FIXED_POINT_TYPE:
7045 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7046
7047 case OFFSET_TYPE:
7048 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7049
7050 case METHOD_TYPE:
7051 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7052 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7053 || (TYPE_ARG_TYPES (a->type)
7054 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7055 && TYPE_ARG_TYPES (b->type)
7056 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7057 && type_list_equal (TYPE_ARG_TYPES (a->type),
7058 TYPE_ARG_TYPES (b->type)))))
7059 break;
7060 return 0;
7061 case ARRAY_TYPE:
7062 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7063
7064 case RECORD_TYPE:
7065 case UNION_TYPE:
7066 case QUAL_UNION_TYPE:
7067 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7068 || (TYPE_FIELDS (a->type)
7069 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7070 && TYPE_FIELDS (b->type)
7071 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7072 && type_list_equal (TYPE_FIELDS (a->type),
7073 TYPE_FIELDS (b->type))));
7074
7075 case FUNCTION_TYPE:
7076 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7077 || (TYPE_ARG_TYPES (a->type)
7078 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7079 && TYPE_ARG_TYPES (b->type)
7080 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7081 && type_list_equal (TYPE_ARG_TYPES (a->type),
7082 TYPE_ARG_TYPES (b->type))))
7083 break;
7084 return 0;
7085
7086 default:
7087 return 0;
7088 }
7089
7090 if (lang_hooks.types.type_hash_eq != NULL)
7091 return lang_hooks.types.type_hash_eq (a->type, b->type);
7092
7093 return 1;
7094 }
7095
7096 /* Given TYPE, and HASHCODE its hash code, return the canonical
7097 object for an identical type if one already exists.
7098 Otherwise, return TYPE, and record it as the canonical object.
7099
7100 To use this function, first create a type of the sort you want.
7101 Then compute its hash code from the fields of the type that
7102 make it different from other similar types.
7103 Then call this function and use the value. */
7104
7105 tree
7106 type_hash_canon (unsigned int hashcode, tree type)
7107 {
7108 type_hash in;
7109 type_hash **loc;
7110
7111 /* The hash table only contains main variants, so ensure that's what we're
7112 being passed. */
7113 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7114
7115 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7116 must call that routine before comparing TYPE_ALIGNs. */
7117 layout_type (type);
7118
7119 in.hash = hashcode;
7120 in.type = type;
7121
7122 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7123 if (*loc)
7124 {
7125 tree t1 = ((type_hash *) *loc)->type;
7126 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7127 free_node (type);
7128 return t1;
7129 }
7130 else
7131 {
7132 struct type_hash *h;
7133
7134 h = ggc_alloc<type_hash> ();
7135 h->hash = hashcode;
7136 h->type = type;
7137 *loc = h;
7138
7139 return type;
7140 }
7141 }
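/* A sketch of the usual calling pattern: build a candidate type, hash its
   distinguishing fields, then canonicalize:

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
     t = type_hash_canon (hstate.end (), t);

   If an equivalent type already existed, the candidate is freed and the
   existing node is returned, so callers must not keep pointers to the
   candidate afterwards.  */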
7142
7143 static void
7144 print_type_hash_statistics (void)
7145 {
7146 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7147 (long) type_hash_table->size (),
7148 (long) type_hash_table->elements (),
7149 type_hash_table->collisions ());
7150 }
7151
7152 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7153 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7154 by adding the hash codes of the individual attributes. */
7155
7156 static void
7157 attribute_hash_list (const_tree list, inchash::hash &hstate)
7158 {
7159 const_tree tail;
7160
7161 for (tail = list; tail; tail = TREE_CHAIN (tail))
7162 /* ??? Do we want to add in TREE_VALUE too? */
7163 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7164 }
7165
7166 /* Given two lists of attributes, return true if list l2 is
7167 equivalent to l1. */
7168
7169 int
7170 attribute_list_equal (const_tree l1, const_tree l2)
7171 {
7172 if (l1 == l2)
7173 return 1;
7174
7175 return attribute_list_contained (l1, l2)
7176 && attribute_list_contained (l2, l1);
7177 }
7178
7179 /* Given two lists of attributes, return true if list L2 is
7180 completely contained within L1. */
7181 /* ??? This would be faster if attribute names were stored in a canonicalized
7182 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7183 must be used to show these elements are equivalent (which they are). */
7184 /* ??? It's not clear that attributes with arguments will always be handled
7185 correctly. */
7186
7187 int
7188 attribute_list_contained (const_tree l1, const_tree l2)
7189 {
7190 const_tree t1, t2;
7191
7192 /* First check the obvious, maybe the lists are identical. */
7193 if (l1 == l2)
7194 return 1;
7195
7196 /* Maybe the lists are similar. */
7197 for (t1 = l1, t2 = l2;
7198 t1 != 0 && t2 != 0
7199 && get_attribute_name (t1) == get_attribute_name (t2)
7200 && TREE_VALUE (t1) == TREE_VALUE (t2);
7201 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7202 ;
7203
7204 /* Maybe the lists are equal. */
7205 if (t1 == 0 && t2 == 0)
7206 return 1;
7207
7208 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7209 {
7210 const_tree attr;
7211 /* This CONST_CAST is okay because lookup_ident_attribute does not
7212 modify its argument and the return value is assigned to a
7213 const_tree. */
7214 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7215 CONST_CAST_TREE (l1));
7216 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7217 attr = lookup_ident_attribute (get_attribute_name (t2),
7218 TREE_CHAIN (attr)))
7219 ;
7220
7221 if (attr == NULL_TREE)
7222 return 0;
7223 }
7224
7225 return 1;
7226 }
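/* For example, the list { packed, aligned(8) } is contained in
   { aligned(8), noreturn, packed }, but not in { aligned(4), packed },
   because attribute_value_equal compares the argument lists as well as
   the names.  */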
7227
7228 /* Given two lists of types
7229 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7230 return 1 if the lists contain the same types in the same order.
7231 Also, the TREE_PURPOSEs must match. */
7232
7233 int
7234 type_list_equal (const_tree l1, const_tree l2)
7235 {
7236 const_tree t1, t2;
7237
7238 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7239 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7240 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7241 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7242 && (TREE_TYPE (TREE_PURPOSE (t1))
7243 == TREE_TYPE (TREE_PURPOSE (t2))))))
7244 return 0;
7245
7246 return t1 == t2;
7247 }
7248
7249 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7250 given by TYPE. If the argument list accepts variable arguments,
7251 then this function counts only the ordinary arguments. */
7252
7253 int
7254 type_num_arguments (const_tree type)
7255 {
7256 int i = 0;
7257 tree t;
7258
7259 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7260 /* If the function does not take a variable number of arguments,
7261 the last element in the list will have type `void'. */
7262 if (VOID_TYPE_P (TREE_VALUE (t)))
7263 break;
7264 else
7265 ++i;
7266
7267 return i;
7268 }
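/* For instance, for 'int f (int, char *, ...)' the TYPE_ARG_TYPES chain
   has no trailing void_type_node and this returns 2, while for
   'int g (void)' the chain is just void_type_node and the result is 0.  */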
7269
7270 /* Nonzero if integer constants T1 and T2
7271 represent the same constant value. */
7272
7273 int
7274 tree_int_cst_equal (const_tree t1, const_tree t2)
7275 {
7276 if (t1 == t2)
7277 return 1;
7278
7279 if (t1 == 0 || t2 == 0)
7280 return 0;
7281
7282 if (TREE_CODE (t1) == INTEGER_CST
7283 && TREE_CODE (t2) == INTEGER_CST
7284 && wi::to_widest (t1) == wi::to_widest (t2))
7285 return 1;
7286
7287 return 0;
7288 }
7289
7290 /* Return true if T is an INTEGER_CST whose numerical value (extended
7291 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7292
7293 bool
7294 tree_fits_shwi_p (const_tree t)
7295 {
7296 return (t != NULL_TREE
7297 && TREE_CODE (t) == INTEGER_CST
7298 && wi::fits_shwi_p (wi::to_widest (t)));
7299 }
7300
7301 /* Return true if T is an INTEGER_CST whose numerical value (extended
7302 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7303
7304 bool
7305 tree_fits_uhwi_p (const_tree t)
7306 {
7307 return (t != NULL_TREE
7308 && TREE_CODE (t) == INTEGER_CST
7309 && wi::fits_uhwi_p (wi::to_widest (t)));
7310 }
7311
7312 /* T is an INTEGER_CST whose numerical value (extended according to
7313 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7314 HOST_WIDE_INT. */
7315
7316 HOST_WIDE_INT
7317 tree_to_shwi (const_tree t)
7318 {
7319 gcc_assert (tree_fits_shwi_p (t));
7320 return TREE_INT_CST_LOW (t);
7321 }
7322
7323 /* T is an INTEGER_CST whose numerical value (extended according to
7324 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7325 HOST_WIDE_INT. */
7326
7327 unsigned HOST_WIDE_INT
7328 tree_to_uhwi (const_tree t)
7329 {
7330 gcc_assert (tree_fits_uhwi_p (t));
7331 return TREE_INT_CST_LOW (t);
7332 }
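/* The usual pattern guards the conversion with the predicate, e.g.

     if (tree_fits_uhwi_p (len))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
         ...
       }

   since calling tree_to_uhwi on a value that does not fit trips the
   assertion above.  */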
7333
7334 /* Return the most significant (sign) bit of T. */
7335
7336 int
7337 tree_int_cst_sign_bit (const_tree t)
7338 {
7339 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7340
7341 return wi::extract_uhwi (t, bitno, 1);
7342 }
7343
7344 /* Return an indication of the sign of the integer constant T.
7345 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7346 Note that -1 will never be returned if T's type is unsigned. */
7347
7348 int
7349 tree_int_cst_sgn (const_tree t)
7350 {
7351 if (wi::eq_p (t, 0))
7352 return 0;
7353 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7354 return 1;
7355 else if (wi::neg_p (t))
7356 return -1;
7357 else
7358 return 1;
7359 }
7360
7361 /* Return the minimum number of bits needed to represent VALUE in a
7362 signed or unsigned type; SGN says which. */
7363
7364 unsigned int
7365 tree_int_cst_min_precision (tree value, signop sgn)
7366 {
7367 /* If the value is negative, compute its negative minus 1. The latter
7368 adjustment is because the absolute value of the largest negative value
7369 is one larger than the largest positive value. This is equivalent to
7370 a bit-wise negation, so use that operation instead. */
7371
7372 if (tree_int_cst_sgn (value) < 0)
7373 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7374
7375 /* Return the number of bits needed, taking into account the fact
7376 that we need one more bit for a signed than unsigned type.
7377 If value is 0 or -1, the minimum precision is 1 no matter
7378 whether SGN is SIGNED or UNSIGNED. */
7379
7380 if (integer_zerop (value))
7381 return 1;
7382 else
7383 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7384 }
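/* Worked example: for VALUE == 5 this returns 3 with UNSIGNED (101) and
   4 with SIGNED (0101); for VALUE == -3 with SIGNED, the bitwise-not
   trick maps the value to 2, giving 3 bits (101 in two's complement).  */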
7385
7386 /* Return truthvalue of whether T1 is the same tree structure as T2.
7387 Return 1 if they are the same.
7388 Return 0 if they are understandably different.
7389 Return -1 if either contains tree structure not understood by
7390 this function. */
7391
7392 int
7393 simple_cst_equal (const_tree t1, const_tree t2)
7394 {
7395 enum tree_code code1, code2;
7396 int cmp;
7397 int i;
7398
7399 if (t1 == t2)
7400 return 1;
7401 if (t1 == 0 || t2 == 0)
7402 return 0;
7403
7404 code1 = TREE_CODE (t1);
7405 code2 = TREE_CODE (t2);
7406
7407 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7408 {
7409 if (CONVERT_EXPR_CODE_P (code2)
7410 || code2 == NON_LVALUE_EXPR)
7411 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7412 else
7413 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7414 }
7415
7416 else if (CONVERT_EXPR_CODE_P (code2)
7417 || code2 == NON_LVALUE_EXPR)
7418 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7419
7420 if (code1 != code2)
7421 return 0;
7422
7423 switch (code1)
7424 {
7425 case INTEGER_CST:
7426 return wi::to_widest (t1) == wi::to_widest (t2);
7427
7428 case REAL_CST:
7429 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7430
7431 case FIXED_CST:
7432 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7433
7434 case STRING_CST:
7435 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7436 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7437 TREE_STRING_LENGTH (t1)));
7438
7439 case CONSTRUCTOR:
7440 {
7441 unsigned HOST_WIDE_INT idx;
7442 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7443 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7444
7445 if (vec_safe_length (v1) != vec_safe_length (v2))
7446 return false;
7447
7448 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7449 /* ??? Should we handle also fields here? */
7450 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7451 return false;
7452 return true;
7453 }
7454
7455 case SAVE_EXPR:
7456 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7457
7458 case CALL_EXPR:
7459 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7460 if (cmp <= 0)
7461 return cmp;
7462 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7463 return 0;
7464 {
7465 const_tree arg1, arg2;
7466 const_call_expr_arg_iterator iter1, iter2;
7467 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7468 arg2 = first_const_call_expr_arg (t2, &iter2);
7469 arg1 && arg2;
7470 arg1 = next_const_call_expr_arg (&iter1),
7471 arg2 = next_const_call_expr_arg (&iter2))
7472 {
7473 cmp = simple_cst_equal (arg1, arg2);
7474 if (cmp <= 0)
7475 return cmp;
7476 }
7477 return arg1 == arg2;
7478 }
7479
7480 case TARGET_EXPR:
7481 /* Special case: if either target is an unallocated VAR_DECL,
7482 it means that it's going to be unified with whatever the
7483 TARGET_EXPR is really supposed to initialize, so treat it
7484 as being equivalent to anything. */
7485 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7486 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7487 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7488 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7489 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7490 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7491 cmp = 1;
7492 else
7493 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7494
7495 if (cmp <= 0)
7496 return cmp;
7497
7498 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7499
7500 case WITH_CLEANUP_EXPR:
7501 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7502 if (cmp <= 0)
7503 return cmp;
7504
7505 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7506
7507 case COMPONENT_REF:
7508 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7509 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7510
7511 return 0;
7512
7513 case VAR_DECL:
7514 case PARM_DECL:
7515 case CONST_DECL:
7516 case FUNCTION_DECL:
7517 return 0;
7518
7519 default:
7520 break;
7521 }
7522
7523 /* This general rule works for most tree codes. All exceptions should be
7524 handled above. If this is a language-specific tree code, we can't
7525 trust what might be in the operand, so say we don't know
7526 the situation. */
7527 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7528 return -1;
7529
7530 switch (TREE_CODE_CLASS (code1))
7531 {
7532 case tcc_unary:
7533 case tcc_binary:
7534 case tcc_comparison:
7535 case tcc_expression:
7536 case tcc_reference:
7537 case tcc_statement:
7538 cmp = 1;
7539 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7540 {
7541 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7542 if (cmp <= 0)
7543 return cmp;
7544 }
7545
7546 return cmp;
7547
7548 default:
7549 return -1;
7550 }
7551 }
7552
7553 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7554 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7555 than U, respectively. */
7556
7557 int
7558 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7559 {
7560 if (tree_int_cst_sgn (t) < 0)
7561 return -1;
7562 else if (!tree_fits_uhwi_p (t))
7563 return 1;
7564 else if (TREE_INT_CST_LOW (t) == u)
7565 return 0;
7566 else if (TREE_INT_CST_LOW (t) < u)
7567 return -1;
7568 else
7569 return 1;
7570 }
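/* For example, a size check against a host constant can be written
   without building an INTEGER_CST:

     if (compare_tree_int (TYPE_SIZE_UNIT (type), 16) > 0)
       ...;

   which is true exactly when the type is larger than 16 bytes.  */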
7571
7572 /* Return true if SIZE represents a constant size that is in bounds of
7573 what the middle-end and the backend accepts (covering not more than
7574 half of the address-space). */
7575
7576 bool
7577 valid_constant_size_p (const_tree size)
7578 {
7579 if (! tree_fits_uhwi_p (size)
7580 || TREE_OVERFLOW (size)
7581 || tree_int_cst_sign_bit (size) != 0)
7582 return false;
7583 return true;
7584 }
7585
7586 /* Return the precision of the type, or for a complex or vector type the
7587 precision of the type of its elements. */
7588
7589 unsigned int
7590 element_precision (const_tree type)
7591 {
7592 if (!TYPE_P (type))
7593 type = TREE_TYPE (type);
7594 enum tree_code code = TREE_CODE (type);
7595 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7596 type = TREE_TYPE (type);
7597
7598 return TYPE_PRECISION (type);
7599 }
7600
7601 /* Return true if CODE represents an associative tree code. Otherwise
7602 return false. */
7603 bool
7604 associative_tree_code (enum tree_code code)
7605 {
7606 switch (code)
7607 {
7608 case BIT_IOR_EXPR:
7609 case BIT_AND_EXPR:
7610 case BIT_XOR_EXPR:
7611 case PLUS_EXPR:
7612 case MULT_EXPR:
7613 case MIN_EXPR:
7614 case MAX_EXPR:
7615 return true;
7616
7617 default:
7618 break;
7619 }
7620 return false;
7621 }
7622
7623 /* Return true if CODE represents a commutative tree code. Otherwise
7624 return false. */
7625 bool
7626 commutative_tree_code (enum tree_code code)
7627 {
7628 switch (code)
7629 {
7630 case PLUS_EXPR:
7631 case MULT_EXPR:
7632 case MULT_HIGHPART_EXPR:
7633 case MIN_EXPR:
7634 case MAX_EXPR:
7635 case BIT_IOR_EXPR:
7636 case BIT_XOR_EXPR:
7637 case BIT_AND_EXPR:
7638 case NE_EXPR:
7639 case EQ_EXPR:
7640 case UNORDERED_EXPR:
7641 case ORDERED_EXPR:
7642 case UNEQ_EXPR:
7643 case LTGT_EXPR:
7644 case TRUTH_AND_EXPR:
7645 case TRUTH_XOR_EXPR:
7646 case TRUTH_OR_EXPR:
7647 case WIDEN_MULT_EXPR:
7648 case VEC_WIDEN_MULT_HI_EXPR:
7649 case VEC_WIDEN_MULT_LO_EXPR:
7650 case VEC_WIDEN_MULT_EVEN_EXPR:
7651 case VEC_WIDEN_MULT_ODD_EXPR:
7652 return true;
7653
7654 default:
7655 break;
7656 }
7657 return false;
7658 }
7659
7660 /* Return true if CODE represents a ternary tree code for which the
7661 first two operands are commutative. Otherwise return false. */
7662 bool
7663 commutative_ternary_tree_code (enum tree_code code)
7664 {
7665 switch (code)
7666 {
7667 case WIDEN_MULT_PLUS_EXPR:
7668 case WIDEN_MULT_MINUS_EXPR:
7669 case DOT_PROD_EXPR:
7670 case FMA_EXPR:
7671 return true;
7672
7673 default:
7674 break;
7675 }
7676 return false;
7677 }
7678
7679 /* Returns true if CODE can overflow. */
7680
7681 bool
7682 operation_can_overflow (enum tree_code code)
7683 {
7684 switch (code)
7685 {
7686 case PLUS_EXPR:
7687 case MINUS_EXPR:
7688 case MULT_EXPR:
7689 case LSHIFT_EXPR:
7690 /* Can overflow in various ways. */
7691 return true;
7692 case TRUNC_DIV_EXPR:
7693 case EXACT_DIV_EXPR:
7694 case FLOOR_DIV_EXPR:
7695 case CEIL_DIV_EXPR:
7696 /* For INT_MIN / -1. */
7697 return true;
7698 case NEGATE_EXPR:
7699 case ABS_EXPR:
7700 /* For -INT_MIN. */
7701 return true;
7702 default:
7703 /* These operators cannot overflow. */
7704 return false;
7705 }
7706 }
7707
7708 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7709 -ftrapv doesn't generate trapping insns for CODE. */
7710
7711 bool
7712 operation_no_trapping_overflow (tree type, enum tree_code code)
7713 {
7714 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7715
7716 /* We don't generate instructions that trap on overflow for complex or vector
7717 types. */
7718 if (!INTEGRAL_TYPE_P (type))
7719 return true;
7720
7721 if (!TYPE_OVERFLOW_TRAPS (type))
7722 return true;
7723
7724 switch (code)
7725 {
7726 case PLUS_EXPR:
7727 case MINUS_EXPR:
7728 case MULT_EXPR:
7729 case NEGATE_EXPR:
7730 case ABS_EXPR:
7731 /* These operators can overflow, and -ftrapv generates trapping code for
7732 these. */
7733 return false;
7734 case TRUNC_DIV_EXPR:
7735 case EXACT_DIV_EXPR:
7736 case FLOOR_DIV_EXPR:
7737 case CEIL_DIV_EXPR:
7738 case LSHIFT_EXPR:
7739 /* These operators can overflow, but -ftrapv does not generate trapping
7740 code for these. */
7741 return true;
7742 default:
7743 /* These operators cannot overflow. */
7744 return true;
7745 }
7746 }
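/* Example: with -ftrapv a signed PLUS_EXPR is expanded through a trapping
   pattern or libcall, so
   operation_no_trapping_overflow (integer_type_node, PLUS_EXPR) is false
   there, while the same call with TRUNC_DIV_EXPR is true because -ftrapv
   does not instrument division.  */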
7747
7748 namespace inchash
7749 {
7750
7751 /* Generate a hash value for an expression. This can be used iteratively
7752 by passing a previous result as the HSTATE argument.
7753
7754 This function is intended to produce the same hash for expressions which
7755 would compare equal using operand_equal_p. */
7756 void
7757 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7758 {
7759 int i;
7760 enum tree_code code;
7761 enum tree_code_class tclass;
7762
7763 if (t == NULL_TREE)
7764 {
7765 hstate.merge_hash (0);
7766 return;
7767 }
7768
7769 if (!(flags & OEP_ADDRESS_OF))
7770 STRIP_NOPS (t);
7771
7772 code = TREE_CODE (t);
7773
7774 switch (code)
7775 {
7776 /* Alas, constants aren't shared, so we can't rely on pointer
7777 identity. */
7778 case VOID_CST:
7779 hstate.merge_hash (0);
7780 return;
7781 case INTEGER_CST:
7782 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7783 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7784 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7785 return;
7786 case REAL_CST:
7787 {
7788 unsigned int val2;
7789 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7790 val2 = rvc_zero;
7791 else
7792 val2 = real_hash (TREE_REAL_CST_PTR (t));
7793 hstate.merge_hash (val2);
7794 return;
7795 }
7796 case FIXED_CST:
7797 {
7798 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7799 hstate.merge_hash (val2);
7800 return;
7801 }
7802 case STRING_CST:
7803 hstate.add ((const void *) TREE_STRING_POINTER (t),
7804 TREE_STRING_LENGTH (t));
7805 return;
7806 case COMPLEX_CST:
7807 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7808 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7809 return;
7810 case VECTOR_CST:
7811 {
7812 unsigned i;
7813 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7814 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
7815 return;
7816 }
7817 case SSA_NAME:
7818 /* We can just compare by pointer. */
7819 hstate.add_wide_int (SSA_NAME_VERSION (t));
7820 return;
7821 case PLACEHOLDER_EXPR:
7822 /* The node itself doesn't matter. */
7823 return;
7824 case BLOCK:
7825 case OMP_CLAUSE:
7826 /* Ignore. */
7827 return;
7828 case TREE_LIST:
7829 /* A list of expressions, for a CALL_EXPR or as the elements of a
7830 VECTOR_CST. */
7831 for (; t; t = TREE_CHAIN (t))
7832 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7833 return;
7834 case CONSTRUCTOR:
7835 {
7836 unsigned HOST_WIDE_INT idx;
7837 tree field, value;
7838 flags &= ~OEP_ADDRESS_OF;
7839 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7840 {
7841 inchash::add_expr (field, hstate, flags);
7842 inchash::add_expr (value, hstate, flags);
7843 }
7844 return;
7845 }
7846 case STATEMENT_LIST:
7847 {
7848 tree_stmt_iterator i;
7849 for (i = tsi_start (CONST_CAST_TREE (t));
7850 !tsi_end_p (i); tsi_next (&i))
7851 inchash::add_expr (tsi_stmt (i), hstate, flags);
7852 return;
7853 }
7854 case FUNCTION_DECL:
7855 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7856 Otherwise nodes that compare equal according to operand_equal_p might
7857 get different hash codes. However, don't do this for machine specific
7858 or front end builtins, since the function code is overloaded in those
7859 cases. */
7860 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7861 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7862 {
7863 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7864 code = TREE_CODE (t);
7865 }
7866 /* FALL THROUGH */
7867 default:
7868 tclass = TREE_CODE_CLASS (code);
7869
7870 if (tclass == tcc_declaration)
7871 {
7872 /* DECLs have a unique ID.  */
7873 hstate.add_wide_int (DECL_UID (t));
7874 }
7875 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7876 {
7877 /* For comparisons that can be swapped, use the lower
7878 tree code. */
7879 enum tree_code ccode = swap_tree_comparison (code);
7880 if (code < ccode)
7881 ccode = code;
7882 hstate.add_object (ccode);
7883 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7884 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7885 }
7886 else if (CONVERT_EXPR_CODE_P (code))
7887 {
7888 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7889 operand_equal_p. */
7890 enum tree_code ccode = NOP_EXPR;
7891 hstate.add_object (ccode);
7892
7893 /* Don't hash the type, as that can lead to having nodes which
7894 compare equal according to operand_equal_p, but which
7895 have different hash codes. Make sure to include signedness
7896 in the hash computation. */
7897 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7898 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7899 }
7900 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7901 else if (code == MEM_REF
7902 && (flags & OEP_ADDRESS_OF) != 0
7903 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7904 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7905 && integer_zerop (TREE_OPERAND (t, 1)))
7906 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7907 hstate, flags);
7908 /* Don't ICE on FE specific trees, or their arguments etc.
7909 during operand_equal_p hash verification. */
7910 else if (!IS_EXPR_CODE_CLASS (tclass))
7911 gcc_assert (flags & OEP_HASH_CHECK);
7912 else
7913 {
7914 unsigned int sflags = flags;
7915
7916 hstate.add_object (code);
7917
7918 switch (code)
7919 {
7920 case ADDR_EXPR:
7921 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7922 flags |= OEP_ADDRESS_OF;
7923 sflags = flags;
7924 break;
7925
7926 case INDIRECT_REF:
7927 case MEM_REF:
7928 case TARGET_MEM_REF:
7929 flags &= ~OEP_ADDRESS_OF;
7930 sflags = flags;
7931 break;
7932
7933 case ARRAY_REF:
7934 case ARRAY_RANGE_REF:
7935 case COMPONENT_REF:
7936 case BIT_FIELD_REF:
7937 sflags &= ~OEP_ADDRESS_OF;
7938 break;
7939
7940 case COND_EXPR:
7941 flags &= ~OEP_ADDRESS_OF;
7942 break;
7943
7944 case FMA_EXPR:
7945 case WIDEN_MULT_PLUS_EXPR:
7946 case WIDEN_MULT_MINUS_EXPR:
7947 {
7948 /* The multiplication operands are commutative. */
7949 inchash::hash one, two;
7950 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7951 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7952 hstate.add_commutative (one, two);
7953 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7954 return;
7955 }
7956
7957 case CALL_EXPR:
7958 if (CALL_EXPR_FN (t) == NULL_TREE)
7959 hstate.add_int (CALL_EXPR_IFN (t));
7960 break;
7961
7962 case TARGET_EXPR:
7963 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7964 Usually different TARGET_EXPRs should just use
7965 different temporaries in their slots. */
7966 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7967 return;
7968
7969 default:
7970 break;
7971 }
7972
7973 /* Don't hash the type, as that can lead to having nodes which
7974 compare equal according to operand_equal_p, but which
7975 have different hash codes. */
7976 if (code == NON_LVALUE_EXPR)
7977 {
7978 /* Make sure to include signedness in the hash computation. */
7979 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7980 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7981 }
7982
7983 else if (commutative_tree_code (code))
7984 {
7985 /* It's a commutative expression. We want to hash it the same
7986 however it appears. We do this by first hashing both operands
7987 and then rehashing based on the order of their independent
7988 hashes. */
7989 inchash::hash one, two;
7990 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7991 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7992 hstate.add_commutative (one, two);
7993 }
7994 else
7995 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7996 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7997 i == 0 ? flags : sflags);
7998 }
7999 return;
8000 }
8001 }
8002
8003 }
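
/* Illustrative sketch (editorial addition, not part of the original
   source): hashing a tree EXPR with inchash::add_expr above, in the
   same way the type-hashing code later in this file drives
   inchash::hash:

     inchash::hash hstate;
     inchash::add_expr (expr, hstate);
     hashval_t h = hstate.end ();

   Expressions for which operand_equal_p returns true are intended to
   produce the same value of H.  */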
8004
8005 /* Constructors for pointer, array and function types.
8006 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8007 constructed by language-dependent code, not here.) */
8008
8009 /* Construct, lay out and return the type of pointers to TO_TYPE with
8010 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8011 reference all of memory. If such a type has already been
8012 constructed, reuse it. */
8013
8014 tree
8015 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8016 bool can_alias_all)
8017 {
8018 tree t;
8019 bool could_alias = can_alias_all;
8020
8021 if (to_type == error_mark_node)
8022 return error_mark_node;
8023
8024 /* If the pointed-to type has the may_alias attribute set, force
8025 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8026 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8027 can_alias_all = true;
8028
8029 /* In some cases, languages will have things that aren't a POINTER_TYPE
8030 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8031 In that case, return that type without regard to the rest of our
8032 operands.
8033
8034 ??? This is a kludge, but consistent with the way this function has
8035 always operated and there doesn't seem to be a good way to avoid this
8036 at the moment. */
8037 if (TYPE_POINTER_TO (to_type) != 0
8038 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8039 return TYPE_POINTER_TO (to_type);
8040
8041 /* First, if we already have a type for pointers to TO_TYPE and it's
8042 the proper mode, use it. */
8043 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8044 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8045 return t;
8046
8047 t = make_node (POINTER_TYPE);
8048
8049 TREE_TYPE (t) = to_type;
8050 SET_TYPE_MODE (t, mode);
8051 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8052 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8053 TYPE_POINTER_TO (to_type) = t;
8054
8055 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8056 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8057 SET_TYPE_STRUCTURAL_EQUALITY (t);
8058 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8059 TYPE_CANONICAL (t)
8060 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8061 mode, false);
8062
8063 /* Lay out the type. This function has many callers that are concerned
8064 with expression-construction, and this simplifies them all. */
8065 layout_type (t);
8066
8067 return t;
8068 }
8069
8070 /* By default build pointers in ptr_mode. */
8071
8072 tree
8073 build_pointer_type (tree to_type)
8074 {
8075 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8076 : TYPE_ADDR_SPACE (to_type);
8077 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8078 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8079 }
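
/* Illustrative sketch (editorial addition, not part of the original
   source): the common case is simply

     tree int_ptr_type = build_pointer_type (integer_type_node);

   which builds (or reuses) the POINTER_TYPE for "int *" in ptr_mode
   without the can-alias-all flag.  */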
8080
8081 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8082
8083 tree
8084 build_reference_type_for_mode (tree to_type, machine_mode mode,
8085 bool can_alias_all)
8086 {
8087 tree t;
8088 bool could_alias = can_alias_all;
8089
8090 if (to_type == error_mark_node)
8091 return error_mark_node;
8092
8093 /* If the pointed-to type has the may_alias attribute set, force
8094 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8095 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8096 can_alias_all = true;
8097
8098 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8099 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8100 In that case, return that type without regard to the rest of our
8101 operands.
8102
8103 ??? This is a kludge, but consistent with the way this function has
8104 always operated and there doesn't seem to be a good way to avoid this
8105 at the moment. */
8106 if (TYPE_REFERENCE_TO (to_type) != 0
8107 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8108 return TYPE_REFERENCE_TO (to_type);
8109
8110 /* First, if we already have a type for references to TO_TYPE and it's
8111 the proper mode, use it.
8112 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8113 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8114 return t;
8115
8116 t = make_node (REFERENCE_TYPE);
8117
8118 TREE_TYPE (t) = to_type;
8119 SET_TYPE_MODE (t, mode);
8120 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8121 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8122 TYPE_REFERENCE_TO (to_type) = t;
8123
8124 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8125 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8126 SET_TYPE_STRUCTURAL_EQUALITY (t);
8127 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8128 TYPE_CANONICAL (t)
8129 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8130 mode, false);
8131
8132 layout_type (t);
8133
8134 return t;
8135 }
8136
8137
8138 /* Build the node for the type of references-to-TO_TYPE by default
8139 in ptr_mode. */
8140
8141 tree
8142 build_reference_type (tree to_type)
8143 {
8144 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8145 : TYPE_ADDR_SPACE (to_type);
8146 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8147 return build_reference_type_for_mode (to_type, pointer_mode, false);
8148 }
8149
8150 #define MAX_INT_CACHED_PREC \
8151 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8152 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8153
8154 /* Builds a signed or unsigned integer type of precision PRECISION.
8155 Used for C bitfields whose precision does not match that of
8156 built-in target types. */
8157 tree
8158 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8159 int unsignedp)
8160 {
8161 tree itype, ret;
8162
8163 if (unsignedp)
8164 unsignedp = MAX_INT_CACHED_PREC + 1;
8165
8166 if (precision <= MAX_INT_CACHED_PREC)
8167 {
8168 itype = nonstandard_integer_type_cache[precision + unsignedp];
8169 if (itype)
8170 return itype;
8171 }
8172
8173 itype = make_node (INTEGER_TYPE);
8174 TYPE_PRECISION (itype) = precision;
8175
8176 if (unsignedp)
8177 fixup_unsigned_type (itype);
8178 else
8179 fixup_signed_type (itype);
8180
8181 ret = itype;
8182 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8183 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8184 if (precision <= MAX_INT_CACHED_PREC)
8185 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8186
8187 return ret;
8188 }
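
/* Illustrative sketch (editorial addition, not part of the original
   source): a 24-bit unsigned type, e.g. for a bit-field declared as
   "unsigned f : 24;", could be obtained with

     tree uint24_type = build_nonstandard_integer_type (24, 1);

   Repeated calls with the same arguments return the cached node.  */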
8189
8190 #define MAX_BOOL_CACHED_PREC \
8191 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8192 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8193
8194 /* Builds a boolean type of precision PRECISION.
8195 Used for boolean vectors to choose proper vector element size. */
8196 tree
8197 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8198 {
8199 tree type;
8200
8201 if (precision <= MAX_BOOL_CACHED_PREC)
8202 {
8203 type = nonstandard_boolean_type_cache[precision];
8204 if (type)
8205 return type;
8206 }
8207
8208 type = make_node (BOOLEAN_TYPE);
8209 TYPE_PRECISION (type) = precision;
8210 fixup_signed_type (type);
8211
8212 if (precision <= MAX_BOOL_CACHED_PREC)
8213 nonstandard_boolean_type_cache[precision] = type;
8214
8215 return type;
8216 }
8217
8218 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8219 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8220 is true, reuse such a type that has already been constructed. */
8221
8222 static tree
8223 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8224 {
8225 tree itype = make_node (INTEGER_TYPE);
8226 inchash::hash hstate;
8227
8228 TREE_TYPE (itype) = type;
8229
8230 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8231 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8232
8233 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8234 SET_TYPE_MODE (itype, TYPE_MODE (type));
8235 TYPE_SIZE (itype) = TYPE_SIZE (type);
8236 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8237 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8238 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8239
8240 if (!shared)
8241 return itype;
8242
8243 if ((TYPE_MIN_VALUE (itype)
8244 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8245 || (TYPE_MAX_VALUE (itype)
8246 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8247 {
8248 /* Since we cannot reliably merge this type, we need to compare it using
8249 structural equality checks. */
8250 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8251 return itype;
8252 }
8253
8254 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8255 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8256 hstate.merge_hash (TYPE_HASH (type));
8257 itype = type_hash_canon (hstate.end (), itype);
8258
8259 return itype;
8260 }
8261
8262 /* Wrapper around build_range_type_1 with SHARED set to true. */
8263
8264 tree
8265 build_range_type (tree type, tree lowval, tree highval)
8266 {
8267 return build_range_type_1 (type, lowval, highval, true);
8268 }
8269
8270 /* Wrapper around build_range_type_1 with SHARED set to false. */
8271
8272 tree
8273 build_nonshared_range_type (tree type, tree lowval, tree highval)
8274 {
8275 return build_range_type_1 (type, lowval, highval, false);
8276 }
8277
8278 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8279 MAXVAL should be the maximum value in the domain
8280 (one less than the length of the array).
8281
8282 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8283 We don't enforce this limit; that is up to the caller (e.g. a language front end).
8284 The limit exists because the result is a signed type and we don't handle
8285 sizes that use more than one HOST_WIDE_INT. */
8286
8287 tree
8288 build_index_type (tree maxval)
8289 {
8290 return build_range_type (sizetype, size_zero_node, maxval);
8291 }
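
/* Illustrative sketch (editorial addition, not part of the original
   source): the domain of a ten-element array is the sizetype range
   [0, 9],

     tree domain = build_index_type (size_int (9));

   which is exactly what build_array_type_nelts below does internally.  */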
8292
8293 /* Return true if the debug information for TYPE, a subtype, should be emitted
8294 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8295 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8296 debug info and doesn't reflect the source code. */
8297
8298 bool
8299 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8300 {
8301 tree base_type = TREE_TYPE (type), low, high;
8302
8303 /* Subrange types have a base type which is an integral type. */
8304 if (!INTEGRAL_TYPE_P (base_type))
8305 return false;
8306
8307 /* Get the real bounds of the subtype. */
8308 if (lang_hooks.types.get_subrange_bounds)
8309 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8310 else
8311 {
8312 low = TYPE_MIN_VALUE (type);
8313 high = TYPE_MAX_VALUE (type);
8314 }
8315
8316 /* If the type and its base type have the same representation and the same
8317 name, then the type is not a subrange but a copy of the base type. */
8318 if ((TREE_CODE (base_type) == INTEGER_TYPE
8319 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8320 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8321 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8322 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8323 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8324 return false;
8325
8326 if (lowval)
8327 *lowval = low;
8328 if (highval)
8329 *highval = high;
8330 return true;
8331 }
8332
8333 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8334 and number of elements specified by the range of values of INDEX_TYPE.
8335 If SHARED is true, reuse such a type that has already been constructed. */
8336
8337 static tree
8338 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8339 {
8340 tree t;
8341
8342 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8343 {
8344 error ("arrays of functions are not meaningful");
8345 elt_type = integer_type_node;
8346 }
8347
8348 t = make_node (ARRAY_TYPE);
8349 TREE_TYPE (t) = elt_type;
8350 TYPE_DOMAIN (t) = index_type;
8351 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8352 layout_type (t);
8353
8354 /* If the element type is incomplete at this point we get marked for
8355 structural equality. Do not record these types in the canonical
8356 type hashtable. */
8357 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8358 return t;
8359
8360 if (shared)
8361 {
8362 inchash::hash hstate;
8363 hstate.add_object (TYPE_HASH (elt_type));
8364 if (index_type)
8365 hstate.add_object (TYPE_HASH (index_type));
8366 t = type_hash_canon (hstate.end (), t);
8367 }
8368
8369 if (TYPE_CANONICAL (t) == t)
8370 {
8371 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8372 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8373 || in_lto_p)
8374 SET_TYPE_STRUCTURAL_EQUALITY (t);
8375 else if (TYPE_CANONICAL (elt_type) != elt_type
8376 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8377 TYPE_CANONICAL (t)
8378 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8379 index_type
8380 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8381 shared);
8382 }
8383
8384 return t;
8385 }
8386
8387 /* Wrapper around build_array_type_1 with SHARED set to true. */
8388
8389 tree
8390 build_array_type (tree elt_type, tree index_type)
8391 {
8392 return build_array_type_1 (elt_type, index_type, true);
8393 }
8394
8395 /* Wrapper around build_array_type_1 with SHARED set to false. */
8396
8397 tree
8398 build_nonshared_array_type (tree elt_type, tree index_type)
8399 {
8400 return build_array_type_1 (elt_type, index_type, false);
8401 }
8402
8403 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8404 sizetype. */
8405
8406 tree
8407 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8408 {
8409 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8410 }
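
/* Illustrative sketch (editorial addition, not part of the original
   source): the type corresponding to the C declaration "int buf[10]"
   could be built as

     tree buf_type = build_array_type_nelts (integer_type_node, 10);  */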
8411
8412 /* Recursively examines the array elements of TYPE, until a non-array
8413 element type is found. */
8414
8415 tree
8416 strip_array_types (tree type)
8417 {
8418 while (TREE_CODE (type) == ARRAY_TYPE)
8419 type = TREE_TYPE (type);
8420
8421 return type;
8422 }
8423
8424 /* Computes the canonical argument types from the argument type list
8425 ARGTYPES.
8426
8427 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8428 on entry to this function, or any of the ARGTYPES are
8429 structural.
8430
8431 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8432 true on entry to this function, or any of the ARGTYPES are
8433 non-canonical.
8434
8435 Returns a canonical argument list, which may be ARGTYPES when the
8436 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8437 true) or would not differ from ARGTYPES. */
8438
8439 static tree
8440 maybe_canonicalize_argtypes (tree argtypes,
8441 bool *any_structural_p,
8442 bool *any_noncanonical_p)
8443 {
8444 tree arg;
8445 bool any_noncanonical_argtypes_p = false;
8446
8447 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8448 {
8449 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8450 /* Fail gracefully by stating that the type is structural. */
8451 *any_structural_p = true;
8452 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8453 *any_structural_p = true;
8454 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8455 || TREE_PURPOSE (arg))
8456 /* If the argument has a default argument, we consider it
8457 non-canonical even though the type itself is canonical.
8458 That way, different variants of function and method types
8459 with default arguments will all point to the variant with
8460 no defaults as their canonical type. */
8461 any_noncanonical_argtypes_p = true;
8462 }
8463
8464 if (*any_structural_p)
8465 return argtypes;
8466
8467 if (any_noncanonical_argtypes_p)
8468 {
8469 /* Build the canonical list of argument types. */
8470 tree canon_argtypes = NULL_TREE;
8471 bool is_void = false;
8472
8473 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8474 {
8475 if (arg == void_list_node)
8476 is_void = true;
8477 else
8478 canon_argtypes = tree_cons (NULL_TREE,
8479 TYPE_CANONICAL (TREE_VALUE (arg)),
8480 canon_argtypes);
8481 }
8482
8483 canon_argtypes = nreverse (canon_argtypes);
8484 if (is_void)
8485 canon_argtypes = chainon (canon_argtypes, void_list_node);
8486
8487 /* There is a non-canonical type. */
8488 *any_noncanonical_p = true;
8489 return canon_argtypes;
8490 }
8491
8492 /* The canonical argument types are the same as ARGTYPES. */
8493 return argtypes;
8494 }
8495
8496 /* Construct, lay out and return
8497 the type of functions returning type VALUE_TYPE
8498 given arguments of types ARG_TYPES.
8499 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8500 are data type nodes for the arguments of the function.
8501 If such a type has already been constructed, reuse it. */
8502
8503 tree
8504 build_function_type (tree value_type, tree arg_types)
8505 {
8506 tree t;
8507 inchash::hash hstate;
8508 bool any_structural_p, any_noncanonical_p;
8509 tree canon_argtypes;
8510
8511 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8512 {
8513 error ("function return type cannot be function");
8514 value_type = integer_type_node;
8515 }
8516
8517 /* Make a node of the sort we want. */
8518 t = make_node (FUNCTION_TYPE);
8519 TREE_TYPE (t) = value_type;
8520 TYPE_ARG_TYPES (t) = arg_types;
8521
8522 /* If we already have such a type, use the old one. */
8523 hstate.add_object (TYPE_HASH (value_type));
8524 type_hash_list (arg_types, hstate);
8525 t = type_hash_canon (hstate.end (), t);
8526
8527 /* Set up the canonical type. */
8528 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8529 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8530 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8531 &any_structural_p,
8532 &any_noncanonical_p);
8533 if (any_structural_p)
8534 SET_TYPE_STRUCTURAL_EQUALITY (t);
8535 else if (any_noncanonical_p)
8536 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8537 canon_argtypes);
8538
8539 if (!COMPLETE_TYPE_P (t))
8540 layout_type (t);
8541 return t;
8542 }
8543
8544 /* Build a function type. The RETURN_TYPE is the type returned by the
8545 function. If VAARGS is set, no void_type_node is appended to the
8546 list. ARGP must always be terminated by a NULL_TREE. */
8547
8548 static tree
8549 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8550 {
8551 tree t, args, last;
8552
8553 t = va_arg (argp, tree);
8554 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8555 args = tree_cons (NULL_TREE, t, args);
8556
8557 if (vaargs)
8558 {
8559 last = args;
8560 if (args != NULL_TREE)
8561 args = nreverse (args);
8562 gcc_assert (last != void_list_node);
8563 }
8564 else if (args == NULL_TREE)
8565 args = void_list_node;
8566 else
8567 {
8568 last = args;
8569 args = nreverse (args);
8570 TREE_CHAIN (last) = void_list_node;
8571 }
8572 args = build_function_type (return_type, args);
8573
8574 return args;
8575 }
8576
8577 /* Build a function type. The RETURN_TYPE is the type returned by the
8578 function. If additional arguments are provided, they are
8579 additional argument types. The list of argument types must always
8580 be terminated by NULL_TREE. */
8581
8582 tree
8583 build_function_type_list (tree return_type, ...)
8584 {
8585 tree args;
8586 va_list p;
8587
8588 va_start (p, return_type);
8589 args = build_function_type_list_1 (false, return_type, p);
8590 va_end (p);
8591 return args;
8592 }
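
/* Illustrative sketch (editorial addition, not part of the original
   source): the type of a function "int f (double, char *)" could be
   built as

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The varargs variant below omits the trailing void_list_node, so the
   resulting type accepts additional unnamed arguments.  */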
8593
8594 /* Build a variable argument function type. The RETURN_TYPE is the
8595 type returned by the function. If additional arguments are provided,
8596 they are additional argument types. The list of argument types must
8597 always be terminated by NULL_TREE. */
8598
8599 tree
8600 build_varargs_function_type_list (tree return_type, ...)
8601 {
8602 tree args;
8603 va_list p;
8604
8605 va_start (p, return_type);
8606 args = build_function_type_list_1 (true, return_type, p);
8607 va_end (p);
8608
8609 return args;
8610 }
8611
8612 /* Build a function type. RETURN_TYPE is the type returned by the
8613 function; VAARGS indicates whether the function takes varargs. The
8614 function takes N named arguments, the types of which are provided in
8615 ARG_TYPES. */
8616
8617 static tree
8618 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8619 tree *arg_types)
8620 {
8621 int i;
8622 tree t = vaargs ? NULL_TREE : void_list_node;
8623
8624 for (i = n - 1; i >= 0; i--)
8625 t = tree_cons (NULL_TREE, arg_types[i], t);
8626
8627 return build_function_type (return_type, t);
8628 }
8629
8630 /* Build a function type. RETURN_TYPE is the type returned by the
8631 function. The function takes N named arguments, the types of which
8632 are provided in ARG_TYPES. */
8633
8634 tree
8635 build_function_type_array (tree return_type, int n, tree *arg_types)
8636 {
8637 return build_function_type_array_1 (false, return_type, n, arg_types);
8638 }
8639
8640 /* Build a variable argument function type. RETURN_TYPE is the type
8641 returned by the function. The function takes N named arguments, the
8642 types of which are provided in ARG_TYPES. */
8643
8644 tree
8645 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8646 {
8647 return build_function_type_array_1 (true, return_type, n, arg_types);
8648 }
8649
8650 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8651 and ARGTYPES (a TREE_LIST) are the return type and argument types
8652 for the method. An implicit additional parameter (of type
8653 pointer-to-BASETYPE) is added to the ARGTYPES. */
8654
8655 tree
8656 build_method_type_directly (tree basetype,
8657 tree rettype,
8658 tree argtypes)
8659 {
8660 tree t;
8661 tree ptype;
8662 inchash::hash hstate;
8663 bool any_structural_p, any_noncanonical_p;
8664 tree canon_argtypes;
8665
8666 /* Make a node of the sort we want. */
8667 t = make_node (METHOD_TYPE);
8668
8669 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8670 TREE_TYPE (t) = rettype;
8671 ptype = build_pointer_type (basetype);
8672
8673 /* The actual arglist for this function includes a "hidden" argument
8674 which is "this". Put it into the list of argument types. */
8675 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8676 TYPE_ARG_TYPES (t) = argtypes;
8677
8678 /* If we already have such a type, use the old one. */
8679 hstate.add_object (TYPE_HASH (basetype));
8680 hstate.add_object (TYPE_HASH (rettype));
8681 type_hash_list (argtypes, hstate);
8682 t = type_hash_canon (hstate.end (), t);
8683
8684 /* Set up the canonical type. */
8685 any_structural_p
8686 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8687 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8688 any_noncanonical_p
8689 = (TYPE_CANONICAL (basetype) != basetype
8690 || TYPE_CANONICAL (rettype) != rettype);
8691 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8692 &any_structural_p,
8693 &any_noncanonical_p);
8694 if (any_structural_p)
8695 SET_TYPE_STRUCTURAL_EQUALITY (t);
8696 else if (any_noncanonical_p)
8697 TYPE_CANONICAL (t)
8698 = build_method_type_directly (TYPE_CANONICAL (basetype),
8699 TYPE_CANONICAL (rettype),
8700 canon_argtypes);
8701 if (!COMPLETE_TYPE_P (t))
8702 layout_type (t);
8703
8704 return t;
8705 }
8706
8707 /* Construct, lay out and return the type of methods belonging to class
8708 BASETYPE and whose arguments and values are described by TYPE.
8709 If that type exists already, reuse it.
8710 TYPE must be a FUNCTION_TYPE node. */
8711
8712 tree
8713 build_method_type (tree basetype, tree type)
8714 {
8715 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8716
8717 return build_method_type_directly (basetype,
8718 TREE_TYPE (type),
8719 TYPE_ARG_TYPES (type));
8720 }
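
/* Illustrative sketch (editorial addition, not part of the original
   source): given KLASS, an assumed RECORD_TYPE, the type of a member
   function "int f ()" of that class could be built as

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree mtype = build_method_type (klass, fntype);

   which prepends the implicit "this" argument of type KLASS *.  */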
8721
8722 /* Construct, lay out and return the type of offsets to a value
8723 of type TYPE, within an object of type BASETYPE.
8724 If a suitable offset type exists already, reuse it. */
8725
8726 tree
8727 build_offset_type (tree basetype, tree type)
8728 {
8729 tree t;
8730 inchash::hash hstate;
8731
8732 /* Make a node of the sort we want. */
8733 t = make_node (OFFSET_TYPE);
8734
8735 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8736 TREE_TYPE (t) = type;
8737
8738 /* If we already have such a type, use the old one. */
8739 hstate.add_object (TYPE_HASH (basetype));
8740 hstate.add_object (TYPE_HASH (type));
8741 t = type_hash_canon (hstate.end (), t);
8742
8743 if (!COMPLETE_TYPE_P (t))
8744 layout_type (t);
8745
8746 if (TYPE_CANONICAL (t) == t)
8747 {
8748 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8749 || TYPE_STRUCTURAL_EQUALITY_P (type))
8750 SET_TYPE_STRUCTURAL_EQUALITY (t);
8751 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8752 || TYPE_CANONICAL (type) != type)
8753 TYPE_CANONICAL (t)
8754 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8755 TYPE_CANONICAL (type));
8756 }
8757
8758 return t;
8759 }
8760
8761 /* Create a complex type whose components are COMPONENT_TYPE. */
8762
8763 tree
8764 build_complex_type (tree component_type)
8765 {
8766 tree t;
8767 inchash::hash hstate;
8768
8769 gcc_assert (INTEGRAL_TYPE_P (component_type)
8770 || SCALAR_FLOAT_TYPE_P (component_type)
8771 || FIXED_POINT_TYPE_P (component_type));
8772
8773 /* Make a node of the sort we want. */
8774 t = make_node (COMPLEX_TYPE);
8775
8776 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8777
8778 /* If we already have such a type, use the old one. */
8779 hstate.add_object (TYPE_HASH (component_type));
8780 t = type_hash_canon (hstate.end (), t);
8781
8782 if (!COMPLETE_TYPE_P (t))
8783 layout_type (t);
8784
8785 if (TYPE_CANONICAL (t) == t)
8786 {
8787 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8788 SET_TYPE_STRUCTURAL_EQUALITY (t);
8789 else if (TYPE_CANONICAL (component_type) != component_type)
8790 TYPE_CANONICAL (t)
8791 = build_complex_type (TYPE_CANONICAL (component_type));
8792 }
8793
8794 /* We need to create a name, since complex is a fundamental type. */
8795 if (! TYPE_NAME (t))
8796 {
8797 const char *name;
8798 if (component_type == char_type_node)
8799 name = "complex char";
8800 else if (component_type == signed_char_type_node)
8801 name = "complex signed char";
8802 else if (component_type == unsigned_char_type_node)
8803 name = "complex unsigned char";
8804 else if (component_type == short_integer_type_node)
8805 name = "complex short int";
8806 else if (component_type == short_unsigned_type_node)
8807 name = "complex short unsigned int";
8808 else if (component_type == integer_type_node)
8809 name = "complex int";
8810 else if (component_type == unsigned_type_node)
8811 name = "complex unsigned int";
8812 else if (component_type == long_integer_type_node)
8813 name = "complex long int";
8814 else if (component_type == long_unsigned_type_node)
8815 name = "complex long unsigned int";
8816 else if (component_type == long_long_integer_type_node)
8817 name = "complex long long int";
8818 else if (component_type == long_long_unsigned_type_node)
8819 name = "complex long long unsigned int";
8820 else
8821 name = 0;
8822
8823 if (name != 0)
8824 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8825 get_identifier (name), t);
8826 }
8827
8828 return build_qualified_type (t, TYPE_QUALS (component_type));
8829 }
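
/* Illustrative sketch (editorial addition, not part of the original
   source):

     tree c_double_type = build_complex_type (double_type_node);

   yields a COMPLEX_TYPE with double components, as used for the C type
   "_Complex double"; thanks to type_hash_canon above, repeated calls
   return the same node.  */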
8830
8831 /* If TYPE is a real or complex floating-point type and the target
8832 does not directly support arithmetic on TYPE then return the wider
8833 type to be used for arithmetic on TYPE. Otherwise, return
8834 NULL_TREE. */
8835
8836 tree
8837 excess_precision_type (tree type)
8838 {
8839 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8840 {
8841 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8842 switch (TREE_CODE (type))
8843 {
8844 case REAL_TYPE:
8845 switch (flt_eval_method)
8846 {
8847 case 1:
8848 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8849 return double_type_node;
8850 break;
8851 case 2:
8852 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8853 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8854 return long_double_type_node;
8855 break;
8856 default:
8857 gcc_unreachable ();
8858 }
8859 break;
8860 case COMPLEX_TYPE:
8861 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8862 return NULL_TREE;
8863 switch (flt_eval_method)
8864 {
8865 case 1:
8866 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8867 return complex_double_type_node;
8868 break;
8869 case 2:
8870 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8871 || (TYPE_MODE (TREE_TYPE (type))
8872 == TYPE_MODE (double_type_node)))
8873 return complex_long_double_type_node;
8874 break;
8875 default:
8876 gcc_unreachable ();
8877 }
8878 break;
8879 default:
8880 break;
8881 }
8882 }
8883 return NULL_TREE;
8884 }
8885 \f
8886 /* Return OP, stripped of any conversions to wider types as much as is safe.
8887 Converting the value back to OP's type makes a value equivalent to OP.
8888
8889 If FOR_TYPE is nonzero, we return a value which, if converted to
8890 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8891
8892 OP must have integer, real or enumeral type. Pointers are not allowed!
8893
8894 There are some cases where the obvious value we could return
8895 would regenerate to OP if converted to OP's type,
8896 but would not extend like OP to wider types.
8897 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8898 For example, if OP is (unsigned short)(signed char)-1,
8899 we avoid returning (signed char)-1 if FOR_TYPE is int,
8900 even though extending that to an unsigned short would regenerate OP,
8901 since the result of extending (signed char)-1 to (int)
8902 is different from (int) OP. */
8903
8904 tree
8905 get_unwidened (tree op, tree for_type)
8906 {
8907 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8908 tree type = TREE_TYPE (op);
8909 unsigned final_prec
8910 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8911 int uns
8912 = (for_type != 0 && for_type != type
8913 && final_prec > TYPE_PRECISION (type)
8914 && TYPE_UNSIGNED (type));
8915 tree win = op;
8916
8917 while (CONVERT_EXPR_P (op))
8918 {
8919 int bitschange;
8920
8921 /* TYPE_PRECISION on vector types has a different meaning
8922 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8923 so avoid them here. */
8924 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8925 break;
8926
8927 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8928 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8929
8930 /* Truncations are many-one so cannot be removed,
8931 unless we are later going to truncate down even further. */
8932 if (bitschange < 0
8933 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8934 break;
8935
8936 /* See what's inside this conversion. If we decide to strip it,
8937 we will set WIN. */
8938 op = TREE_OPERAND (op, 0);
8939
8940 /* If we have not stripped any zero-extensions (uns is 0),
8941 we can strip any kind of extension.
8942 If we have previously stripped a zero-extension,
8943 only zero-extensions can safely be stripped.
8944 Any extension can be stripped if the bits it would produce
8945 are all going to be discarded later by truncating to FOR_TYPE. */
8946
8947 if (bitschange > 0)
8948 {
8949 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8950 win = op;
8951 /* TYPE_UNSIGNED says whether this is a zero-extension.
8952 Let's avoid computing it if it does not affect WIN
8953 and if UNS will not be needed again. */
8954 if ((uns
8955 || CONVERT_EXPR_P (op))
8956 && TYPE_UNSIGNED (TREE_TYPE (op)))
8957 {
8958 uns = 1;
8959 win = op;
8960 }
8961 }
8962 }
8963
8964 /* If we finally reach a constant, see if it fits in for_type and
8965 in that case convert it. */
8966 if (for_type
8967 && TREE_CODE (win) == INTEGER_CST
8968 && TREE_TYPE (win) != for_type
8969 && int_fits_type_p (win, for_type))
8970 win = fold_convert (for_type, win);
8971
8972 return win;
8973 }
8974 \f
8975 /* Return OP or a simpler expression for a narrower value
8976 which can be sign-extended or zero-extended to give back OP.
8977 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8978 or 0 if the value should be sign-extended. */
8979
8980 tree
8981 get_narrower (tree op, int *unsignedp_ptr)
8982 {
8983 int uns = 0;
8984 int first = 1;
8985 tree win = op;
8986 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8987
8988 while (TREE_CODE (op) == NOP_EXPR)
8989 {
8990 int bitschange
8991 = (TYPE_PRECISION (TREE_TYPE (op))
8992 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8993
8994 /* Truncations are many-one so cannot be removed. */
8995 if (bitschange < 0)
8996 break;
8997
8998 /* See what's inside this conversion. If we decide to strip it,
8999 we will set WIN. */
9000
9001 if (bitschange > 0)
9002 {
9003 op = TREE_OPERAND (op, 0);
9004 /* An extension: the outermost one can be stripped,
9005 but remember whether it is zero or sign extension. */
9006 if (first)
9007 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9008 /* Otherwise, if a sign extension has been stripped,
9009 only sign extensions can now be stripped;
9010 if a zero extension has been stripped, only zero-extensions. */
9011 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9012 break;
9013 first = 0;
9014 }
9015 else /* bitschange == 0 */
9016 {
9017 /* A change in nominal type can always be stripped, but we must
9018 preserve the unsignedness. */
9019 if (first)
9020 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9021 first = 0;
9022 op = TREE_OPERAND (op, 0);
9023 /* Keep trying to narrow, but don't assign op to win if it
9024 would turn an integral type into something else. */
9025 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9026 continue;
9027 }
9028
9029 win = op;
9030 }
9031
9032 if (TREE_CODE (op) == COMPONENT_REF
9033 /* Since type_for_size always gives an integer type. */
9034 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9035 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9036 /* Ensure field is laid out already. */
9037 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9038 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9039 {
9040 unsigned HOST_WIDE_INT innerprec
9041 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9042 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9043 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9044 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9045
9046 /* We can get this structure field in a narrower type that fits it,
9047 but the resulting extension to its nominal type (a fullword type)
9048 must satisfy the same conditions as for other extensions.
9049
9050 Do this only for fields that are aligned (not bit-fields),
9051 because there is no advantage in doing this when bit-field
9052 insns would be used anyway. */
9053
9054 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9055 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9056 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9057 && type != 0)
9058 {
9059 if (first)
9060 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9061 win = fold_convert (type, op);
9062 }
9063 }
9064
9065 *unsignedp_ptr = uns;
9066 return win;
9067 }
9068 \f
9069 /* Returns true if integer constant C has a value that is permissible
9070 for type TYPE (an INTEGER_TYPE). */
9071
9072 bool
9073 int_fits_type_p (const_tree c, const_tree type)
9074 {
9075 tree type_low_bound, type_high_bound;
9076 bool ok_for_low_bound, ok_for_high_bound;
9077 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9078
9079 retry:
9080 type_low_bound = TYPE_MIN_VALUE (type);
9081 type_high_bound = TYPE_MAX_VALUE (type);
9082
9083 /* If at least one bound of the type is a constant integer, we can check
9084 ourselves and maybe make a decision. If no such decision is possible, but
9085 this type is a subtype, try checking against that. Otherwise, use
9086 fits_to_tree_p, which checks against the precision.
9087
9088 Compute the status for each possibly constant bound, and return if we see
9089 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
9090 true when the constant is known to satisfy the corresponding bound, and we
9091 return false immediately when the constant is known not to fit. */
9092
9093 /* Check if c >= type_low_bound. */
9094 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9095 {
9096 if (tree_int_cst_lt (c, type_low_bound))
9097 return false;
9098 ok_for_low_bound = true;
9099 }
9100 else
9101 ok_for_low_bound = false;
9102
9103 /* Check if c <= type_high_bound. */
9104 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9105 {
9106 if (tree_int_cst_lt (type_high_bound, c))
9107 return false;
9108 ok_for_high_bound = true;
9109 }
9110 else
9111 ok_for_high_bound = false;
9112
9113 /* If the constant fits both bounds, the result is known. */
9114 if (ok_for_low_bound && ok_for_high_bound)
9115 return true;
9116
9117 /* Perform some generic filtering which may allow making a decision
9118 even if the bounds are not constant. First, negative integers
9119 never fit in unsigned types. */
9120 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9121 return false;
9122
9123 /* Second, narrower types always fit in wider ones. */
9124 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9125 return true;
9126
9127 /* Third, unsigned integers with top bit set never fit signed types. */
9128 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9129 {
9130 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9131 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9132 {
9133 /* When a tree_cst is converted to a wide-int, the precision
9134 is taken from the type. However, if the precision of the
9135 mode underneath the type is smaller than that, it is
9136 possible that the value will not fit. The test below
9137 fails if any bit is set between the sign bit of the
9138 underlying mode and the top bit of the type. */
9139 if (wi::ne_p (wi::zext (c, prec - 1), c))
9140 return false;
9141 }
9142 else if (wi::neg_p (c))
9143 return false;
9144 }
9145
9146 /* If we haven't been able to decide at this point, there is nothing more we
9147 can check ourselves here. Look at the base type if we have one and it
9148 has the same precision. */
9149 if (TREE_CODE (type) == INTEGER_TYPE
9150 && TREE_TYPE (type) != 0
9151 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9152 {
9153 type = TREE_TYPE (type);
9154 goto retry;
9155 }
9156
9157 /* Or to fits_to_tree_p, if nothing else. */
9158 return wi::fits_to_tree_p (c, type);
9159 }
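
/* Illustrative sketch (editorial addition, not part of the original
   source):

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   FITS is false here, since 300 exceeds the unsigned char maximum of
   255 on the usual 8-bit-char targets.  */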
9160
9161 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9162 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9163 represented (assuming two's-complement arithmetic) within the bit
9164 precision of the type are returned instead. */
9165
9166 void
9167 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9168 {
9169 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9170 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9171 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9172 else
9173 {
9174 if (TYPE_UNSIGNED (type))
9175 mpz_set_ui (min, 0);
9176 else
9177 {
9178 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9179 wi::to_mpz (mn, min, SIGNED);
9180 }
9181 }
9182
9183 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9184 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9185 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9186 else
9187 {
9188 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9189 wi::to_mpz (mn, max, TYPE_SIGN (type));
9190 }
9191 }
9192
9193 /* Return true if VAR is an automatic variable defined in function FN. */
9194
9195 bool
9196 auto_var_in_fn_p (const_tree var, const_tree fn)
9197 {
9198 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9199 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9200 || TREE_CODE (var) == PARM_DECL)
9201 && ! TREE_STATIC (var))
9202 || TREE_CODE (var) == LABEL_DECL
9203 || TREE_CODE (var) == RESULT_DECL));
9204 }
9205
9206 /* Subprogram of following function. Called by walk_tree.
9207
9208 Return *TP if it is an automatic variable or parameter of the
9209 function passed in as DATA. */
9210
9211 static tree
9212 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9213 {
9214 tree fn = (tree) data;
9215
9216 if (TYPE_P (*tp))
9217 *walk_subtrees = 0;
9218
9219 else if (DECL_P (*tp)
9220 && auto_var_in_fn_p (*tp, fn))
9221 return *tp;
9222
9223 return NULL_TREE;
9224 }
9225
9226 /* Returns true if T is, contains, or refers to a type with variable
9227 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9228 arguments, but not the return type. If FN is nonzero, only return
9229 true if a modifier of the type or position of FN is a variable or
9230 parameter inside FN.
9231
9232 This concept is more general than that of C99 'variably modified types':
9233 in C99, a struct type is never variably modified because a VLA may not
9234 appear as a structure member. However, in GNU C, code like:
9235
9236 struct S { int i[f()]; };
9237
9238 is valid, and other languages may define similar constructs. */
9239
9240 bool
9241 variably_modified_type_p (tree type, tree fn)
9242 {
9243 tree t;
9244
9245 /* Test if T is either variable (if FN is zero) or an expression containing
9246 a variable in FN. If TYPE isn't gimplified, return true also if
9247 gimplify_one_sizepos would gimplify the expression into a local
9248 variable. */
9249 #define RETURN_TRUE_IF_VAR(T) \
9250 do { tree _t = (T); \
9251 if (_t != NULL_TREE \
9252 && _t != error_mark_node \
9253 && TREE_CODE (_t) != INTEGER_CST \
9254 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9255 && (!fn \
9256 || (!TYPE_SIZES_GIMPLIFIED (type) \
9257 && !is_gimple_sizepos (_t)) \
9258 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9259 return true; } while (0)
9260
9261 if (type == error_mark_node)
9262 return false;
9263
9264 /* If TYPE itself has variable size, it is variably modified. */
9265 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9266 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9267
9268 switch (TREE_CODE (type))
9269 {
9270 case POINTER_TYPE:
9271 case REFERENCE_TYPE:
9272 case VECTOR_TYPE:
9273 if (variably_modified_type_p (TREE_TYPE (type), fn))
9274 return true;
9275 break;
9276
9277 case FUNCTION_TYPE:
9278 case METHOD_TYPE:
9279 /* If TYPE is a function type, it is variably modified if the
9280 return type is variably modified. */
9281 if (variably_modified_type_p (TREE_TYPE (type), fn))
9282 return true;
9283 break;
9284
9285 case INTEGER_TYPE:
9286 case REAL_TYPE:
9287 case FIXED_POINT_TYPE:
9288 case ENUMERAL_TYPE:
9289 case BOOLEAN_TYPE:
9290 /* Scalar types are variably modified if their end points
9291 aren't constant. */
9292 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9293 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9294 break;
9295
9296 case RECORD_TYPE:
9297 case UNION_TYPE:
9298 case QUAL_UNION_TYPE:
9299 /* We can't see if any of the fields are variably-modified by the
9300 definition we normally use, since that would produce infinite
9301 recursion via pointers. */
9302 /* This is variably modified if some field's type is. */
9303 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9304 if (TREE_CODE (t) == FIELD_DECL)
9305 {
9306 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9307 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9308 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9309
9310 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9311 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9312 }
9313 break;
9314
9315 case ARRAY_TYPE:
9316 /* Do not call ourselves to avoid infinite recursion. This is
9317 variably modified if the element type is. */
9318 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9319 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9320 break;
9321
9322 default:
9323 break;
9324 }
9325
9326 /* The current language may have other cases to check, but in general,
9327 all other types are not variably modified. */
9328 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9329
9330 #undef RETURN_TRUE_IF_VAR
9331 }
9332
9333 /* Given a DECL or TYPE, return the scope in which it was declared, or
9334 NULL_TREE if there is no containing scope. */
9335
9336 tree
9337 get_containing_scope (const_tree t)
9338 {
9339 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9340 }
9341
9342 /* Return the innermost context enclosing DECL that is
9343 a FUNCTION_DECL, or zero if none. */
9344
9345 tree
9346 decl_function_context (const_tree decl)
9347 {
9348 tree context;
9349
9350 if (TREE_CODE (decl) == ERROR_MARK)
9351 return 0;
9352
9353 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9354 where we look up the function at runtime. Such functions always take
9355 a first argument of type 'pointer to real context'.
9356
9357 C++ should really be fixed to use DECL_CONTEXT for the real context,
9358 and use something else for the "virtual context". */
9359 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9360 context
9361 = TYPE_MAIN_VARIANT
9362 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9363 else
9364 context = DECL_CONTEXT (decl);
9365
9366 while (context && TREE_CODE (context) != FUNCTION_DECL)
9367 {
9368 if (TREE_CODE (context) == BLOCK)
9369 context = BLOCK_SUPERCONTEXT (context);
9370 else
9371 context = get_containing_scope (context);
9372 }
9373
9374 return context;
9375 }
9376
9377 /* Return the innermost context enclosing DECL that is
9378 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9379 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9380
9381 tree
9382 decl_type_context (const_tree decl)
9383 {
9384 tree context = DECL_CONTEXT (decl);
9385
9386 while (context)
9387 switch (TREE_CODE (context))
9388 {
9389 case NAMESPACE_DECL:
9390 case TRANSLATION_UNIT_DECL:
9391 return NULL_TREE;
9392
9393 case RECORD_TYPE:
9394 case UNION_TYPE:
9395 case QUAL_UNION_TYPE:
9396 return context;
9397
9398 case TYPE_DECL:
9399 case FUNCTION_DECL:
9400 context = DECL_CONTEXT (context);
9401 break;
9402
9403 case BLOCK:
9404 context = BLOCK_SUPERCONTEXT (context);
9405 break;
9406
9407 default:
9408 gcc_unreachable ();
9409 }
9410
9411 return NULL_TREE;
9412 }
9413
9414 /* CALL is a CALL_EXPR. Return the declaration for the function
9415 called, or NULL_TREE if the called function cannot be
9416 determined. */
9417
9418 tree
9419 get_callee_fndecl (const_tree call)
9420 {
9421 tree addr;
9422
9423 if (call == error_mark_node)
9424 return error_mark_node;
9425
9426 /* It's invalid to call this function with anything but a
9427 CALL_EXPR. */
9428 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9429
9430 /* The first operand to the CALL is the address of the function
9431 called. */
9432 addr = CALL_EXPR_FN (call);
9433
9434 /* If there is no function, return early. */
9435 if (addr == NULL_TREE)
9436 return NULL_TREE;
9437
9438 STRIP_NOPS (addr);
9439
9440 /* If this is a readonly function pointer, extract its initial value. */
9441 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9442 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9443 && DECL_INITIAL (addr))
9444 addr = DECL_INITIAL (addr);
9445
9446 /* If the address is just `&f' for some function `f', then we know
9447 that `f' is being called. */
9448 if (TREE_CODE (addr) == ADDR_EXPR
9449 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9450 return TREE_OPERAND (addr, 0);
9451
9452 /* We couldn't figure out what was being called. */
9453 return NULL_TREE;
9454 }
9455
9456 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9457 return the associated function code, otherwise return CFN_LAST. */
9458
9459 combined_fn
9460 get_call_combined_fn (const_tree call)
9461 {
9462 /* It's invalid to call this function with anything but a CALL_EXPR. */
9463 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9464
9465 if (!CALL_EXPR_FN (call))
9466 return as_combined_fn (CALL_EXPR_IFN (call));
9467
9468 tree fndecl = get_callee_fndecl (call);
9469 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9470 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9471
9472 return CFN_LAST;
9473 }
9474
9475 #define TREE_MEM_USAGE_SPACES 40
9476
9477 /* Print debugging information about tree nodes generated during the compile,
9478 and any language-specific information. */
9479
9480 void
9481 dump_tree_statistics (void)
9482 {
9483 if (GATHER_STATISTICS)
9484 {
9485 int i;
9486 int total_nodes, total_bytes;
9487 fprintf (stderr, "\nKind Nodes Bytes\n");
9488 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9489 total_nodes = total_bytes = 0;
9490 for (i = 0; i < (int) all_kinds; i++)
9491 {
9492 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9493 tree_node_counts[i], tree_node_sizes[i]);
9494 total_nodes += tree_node_counts[i];
9495 total_bytes += tree_node_sizes[i];
9496 }
9497 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9498 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9499 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9500 fprintf (stderr, "Code Nodes\n");
9501 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9502 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9503 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9504 tree_code_counts[i]);
9505 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9506 fprintf (stderr, "\n");
9507 ssanames_print_statistics ();
9508 fprintf (stderr, "\n");
9509 phinodes_print_statistics ();
9510 fprintf (stderr, "\n");
9511 }
9512 else
9513 fprintf (stderr, "(No per-node statistics)\n");
9514
9515 print_type_hash_statistics ();
9516 print_debug_expr_statistics ();
9517 print_value_expr_statistics ();
9518 lang_hooks.print_statistics ();
9519 }
9520 \f
9521 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9522
9523 /* Generate a crc32 of the most significant BITS bits of VALUE. */
9524
9525 static unsigned
9526 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9527 {
9528 unsigned ix;
9529
9530 for (ix = bits; ix--; value <<= 1)
9531 {
9532 unsigned feedback;
9533
9534 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9535 chksum <<= 1;
9536 chksum ^= feedback;
9537 }
9538 return chksum;
9539 }
9540
9541 /* Generate a crc32 of a 32-bit unsigned. */
9542
9543 unsigned
9544 crc32_unsigned (unsigned chksum, unsigned value)
9545 {
9546 return crc32_unsigned_bits (chksum, value, 32);
9547 }
9548
9549 /* Generate a crc32 of a byte. */
9550
9551 unsigned
9552 crc32_byte (unsigned chksum, char byte)
9553 {
9554 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9555 }
9556
9557 /* Generate a crc32 of a string. */
9558
9559 unsigned
9560 crc32_string (unsigned chksum, const char *string)
9561 {
9562 do
9563 {
9564 chksum = crc32_byte (chksum, *string);
9565 }
9566 while (*string++);
9567 return chksum;
9568 }
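
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): checksums chain, so hashing several strings is just
   repeated application with the previous result as the seed.  */
#if 0
static unsigned
example_crc32_of_two_strings (const char *a, const char *b)
{
  unsigned chksum = crc32_string (0, a);
  chksum = crc32_string (chksum, b);
  return chksum;
}
#endif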
9569
9570 /* P is a string that will be used in a symbol. Mask out any characters
9571 that are not valid in that context. */
9572
9573 void
9574 clean_symbol_name (char *p)
9575 {
9576 for (; *p; p++)
9577 if (! (ISALNUM (*p)
9578 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9579 || *p == '$'
9580 #endif
9581 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9582 || *p == '.'
9583 #endif
9584 ))
9585 *p = '_';
9586 }
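
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): every character that is not alphanumeric (and not '$' or
   '.' where the target allows them in labels) is rewritten to '_'.  */
#if 0
static void
example_clean_symbol_name (void)
{
  char buf[] = "foo-bar.c";
  clean_symbol_name (buf);
  /* buf is now "foo_bar.c" on targets that allow '.' in labels, or
     "foo_bar_c" when NO_DOT_IN_LABEL is defined.  */
}
#endif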
9587
9588 /* For anonymous aggregate types, we need some sort of name to
9589 hold on to. In practice, such names should not appear elsewhere, but
9590 it is not harmful if they do. Return true if ID_NODE is such a name. */
9591 bool
9592 anon_aggrname_p (const_tree id_node)
9593 {
9594 #ifndef NO_DOT_IN_LABEL
9595 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9596 && IDENTIFIER_POINTER (id_node)[1] == '_');
9597 #else /* NO_DOT_IN_LABEL */
9598 #ifndef NO_DOLLAR_IN_LABEL
9599 return (IDENTIFIER_POINTER (id_node)[0] == '$'
9600 && IDENTIFIER_POINTER (id_node)[1] == '_');
9601 #else /* NO_DOLLAR_IN_LABEL */
9602 #define ANON_AGGRNAME_PREFIX "__anon_"
9603 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9604 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9605 #endif /* NO_DOLLAR_IN_LABEL */
9606 #endif /* NO_DOT_IN_LABEL */
9607 }
9608
9609 /* Return a printf-style format string for an anonymous aggregate name. */
9610 const char *
9611 anon_aggrname_format ()
9612 {
9613 #ifndef NO_DOT_IN_LABEL
9614 return "._%d";
9615 #else /* NO_DOT_IN_LABEL */
9616 #ifndef NO_DOLLAR_IN_LABEL
9617 return "$_%d";
9618 #else /* NO_DOLLAR_IN_LABEL */
9619 return "__anon_%d";
9620 #endif /* NO_DOLLAR_IN_LABEL */
9621 #endif /* NO_DOT_IN_LABEL */
9622 }
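
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): the two helpers above are meant to agree -- a name built
   with anon_aggrname_format is recognized by anon_aggrname_p.  */
#if 0
static tree
example_make_anon_aggr_name (int counter)
{
  char buf[64];
  snprintf (buf, sizeof (buf), anon_aggrname_format (), counter);
  tree id = get_identifier (buf);
  gcc_assert (anon_aggrname_p (id));
  return id;
}
#endif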
9623
9624 /* Generate a name for a special-purpose function.
9625 The generated name may need to be unique across the whole link.
9626 Changes to this function may also require corresponding changes to
9627 xstrdup_mask_random.
9628 TYPE is some string to identify the purpose of this function to the
9629 linker or collect2; it must start with an uppercase letter,
9630 one of:
9631 I - for constructors
9632 D - for destructors
9633 N - for C++ anonymous namespaces
9634 F - for DWARF unwind frame information. */
9635
9636 tree
9637 get_file_function_name (const char *type)
9638 {
9639 char *buf;
9640 const char *p;
9641 char *q;
9642
9643 /* If we already have a name we know to be unique, just use that. */
9644 if (first_global_object_name)
9645 p = q = ASTRDUP (first_global_object_name);
9646 /* If the target is handling the constructors/destructors, they
9647 will be local to this file and the name is only necessary for
9648 debugging purposes.
9649 We also assign sub_I and sub_D suffixes to constructors called from
9650 the global static constructors. These are always local. */
9651 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9652 || (strncmp (type, "sub_", 4) == 0
9653 && (type[4] == 'I' || type[4] == 'D')))
9654 {
9655 const char *file = main_input_filename;
9656 if (! file)
9657 file = LOCATION_FILE (input_location);
9658 /* Just use the file's basename, because the full pathname
9659 might be quite long. */
9660 p = q = ASTRDUP (lbasename (file));
9661 }
9662 else
9663 {
9664 /* Otherwise, the name must be unique across the entire link.
9665 We don't have anything that we know to be unique to this translation
9666 unit, so use what we do have and throw in some randomness. */
9667 unsigned len;
9668 const char *name = weak_global_object_name;
9669 const char *file = main_input_filename;
9670
9671 if (! name)
9672 name = "";
9673 if (! file)
9674 file = LOCATION_FILE (input_location);
9675
9676 len = strlen (file);
9677 q = (char *) alloca (9 + 17 + len + 1);
9678 memcpy (q, file, len + 1);
9679
9680 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9681 crc32_string (0, name), get_random_seed (false));
9682
9683 p = q;
9684 }
9685
9686 clean_symbol_name (q);
9687 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9688 + strlen (type));
9689
9690 /* Set up the name of the file-level functions we may need.
9691 Use a global object (which is already required to be unique over
9692 the program) rather than the file name (which imposes extra
9693 constraints). */
9694 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9695
9696 return get_identifier (buf);
9697 }
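
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): requesting a constructor name yields an identifier of the
   form "_GLOBAL__I_<suffix>", where <suffix> comes from a known-unique
   global, the input file's basename, or the randomized fallback above.  */
#if 0
static tree
example_static_ctor_name (void)
{
  return get_file_function_name ("I");
}
#endif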
9698 \f
9699 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9700
9701 /* Complain that the tree code of NODE does not match the expected 0
9702 terminated list of trailing codes. The trailing code list can be
9703 empty, for a more vague error message. FILE, LINE, and FUNCTION
9704 are of the caller. */
9705
9706 void
9707 tree_check_failed (const_tree node, const char *file,
9708 int line, const char *function, ...)
9709 {
9710 va_list args;
9711 const char *buffer;
9712 unsigned length = 0;
9713 enum tree_code code;
9714
9715 va_start (args, function);
9716 while ((code = (enum tree_code) va_arg (args, int)))
9717 length += 4 + strlen (get_tree_code_name (code));
9718 va_end (args);
9719 if (length)
9720 {
9721 char *tmp;
9722 va_start (args, function);
9723 length += strlen ("expected ");
9724 buffer = tmp = (char *) alloca (length);
9725 length = 0;
9726 while ((code = (enum tree_code) va_arg (args, int)))
9727 {
9728 const char *prefix = length ? " or " : "expected ";
9729
9730 strcpy (tmp + length, prefix);
9731 length += strlen (prefix);
9732 strcpy (tmp + length, get_tree_code_name (code));
9733 length += strlen (get_tree_code_name (code));
9734 }
9735 va_end (args);
9736 }
9737 else
9738 buffer = "unexpected node";
9739
9740 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9741 buffer, get_tree_code_name (TREE_CODE (node)),
9742 function, trim_filename (file), line);
9743 }
9744
9745 /* Complain that the tree code of NODE does match one of the codes in the
9746 expected 0 terminated trailing list, although it should not. FILE,
9747 LINE, and FUNCTION are of the caller. */
9748
9749 void
9750 tree_not_check_failed (const_tree node, const char *file,
9751 int line, const char *function, ...)
9752 {
9753 va_list args;
9754 char *buffer;
9755 unsigned length = 0;
9756 enum tree_code code;
9757
9758 va_start (args, function);
9759 while ((code = (enum tree_code) va_arg (args, int)))
9760 length += 4 + strlen (get_tree_code_name (code));
9761 va_end (args);
9762 va_start (args, function);
9763 buffer = (char *) alloca (length);
9764 length = 0;
9765 while ((code = (enum tree_code) va_arg (args, int)))
9766 {
9767 if (length)
9768 {
9769 strcpy (buffer + length, " or ");
9770 length += 4;
9771 }
9772 strcpy (buffer + length, get_tree_code_name (code));
9773 length += strlen (get_tree_code_name (code));
9774 }
9775 va_end (args);
9776
9777 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9778 buffer, get_tree_code_name (TREE_CODE (node)),
9779 function, trim_filename (file), line);
9780 }
9781
9782 /* Similar to tree_check_failed, except that we check for a class of tree
9783 code, given in CL. */
9784
9785 void
9786 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9787 const char *file, int line, const char *function)
9788 {
9789 internal_error
9790 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9791 TREE_CODE_CLASS_STRING (cl),
9792 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9793 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9794 }
9795
9796 /* Similar to tree_check_failed, except that instead of specifying a
9797 dozen codes, use the knowledge that they're all sequential. */
9798
9799 void
9800 tree_range_check_failed (const_tree node, const char *file, int line,
9801 const char *function, enum tree_code c1,
9802 enum tree_code c2)
9803 {
9804 char *buffer;
9805 unsigned length = 0;
9806 unsigned int c;
9807
9808 for (c = c1; c <= c2; ++c)
9809 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9810
9811 length += strlen ("expected ");
9812 buffer = (char *) alloca (length);
9813 length = 0;
9814
9815 for (c = c1; c <= c2; ++c)
9816 {
9817 const char *prefix = length ? " or " : "expected ";
9818
9819 strcpy (buffer + length, prefix);
9820 length += strlen (prefix);
9821 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9822 length += strlen (get_tree_code_name ((enum tree_code) c));
9823 }
9824
9825 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9826 buffer, get_tree_code_name (TREE_CODE (node)),
9827 function, trim_filename (file), line);
9828 }
9829
9830
9831 /* Similar to tree_check_failed, except that we check that a tree does
9832 not belong to the specified class, given in CL. */
9833
9834 void
9835 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9836 const char *file, int line, const char *function)
9837 {
9838 internal_error
9839 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9840 TREE_CODE_CLASS_STRING (cl),
9841 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9842 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9843 }
9844
9845
9846 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9847
9848 void
9849 omp_clause_check_failed (const_tree node, const char *file, int line,
9850 const char *function, enum omp_clause_code code)
9851 {
9852 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9853 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9854 function, trim_filename (file), line);
9855 }
9856
9857
9858 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9859
9860 void
9861 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9862 const char *function, enum omp_clause_code c1,
9863 enum omp_clause_code c2)
9864 {
9865 char *buffer;
9866 unsigned length = 0;
9867 unsigned int c;
9868
9869 for (c = c1; c <= c2; ++c)
9870 length += 4 + strlen (omp_clause_code_name[c]);
9871
9872 length += strlen ("expected ");
9873 buffer = (char *) alloca (length);
9874 length = 0;
9875
9876 for (c = c1; c <= c2; ++c)
9877 {
9878 const char *prefix = length ? " or " : "expected ";
9879
9880 strcpy (buffer + length, prefix);
9881 length += strlen (prefix);
9882 strcpy (buffer + length, omp_clause_code_name[c]);
9883 length += strlen (omp_clause_code_name[c]);
9884 }
9885
9886 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9887 buffer, omp_clause_code_name[TREE_CODE (node)],
9888 function, trim_filename (file), line);
9889 }
9890
9891
9892 #undef DEFTREESTRUCT
9893 #define DEFTREESTRUCT(VAL, NAME) NAME,
9894
9895 static const char *ts_enum_names[] = {
9896 #include "treestruct.def"
9897 };
9898 #undef DEFTREESTRUCT
9899
9900 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9901
9902 /* Similar to tree_class_check_failed, except that we check for
9903 whether CODE contains the tree structure identified by EN. */
9904
9905 void
9906 tree_contains_struct_check_failed (const_tree node,
9907 const enum tree_node_structure_enum en,
9908 const char *file, int line,
9909 const char *function)
9910 {
9911 internal_error
9912 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9913 TS_ENUM_NAME (en),
9914 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9915 }
9916
9917
9918 /* Similar to above, except that the check is for the bounds of a
9919 tree_int_cst's (dynamically sized) vector of elements. */
9920
9921 void
9922 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9923 const char *function)
9924 {
9925 internal_error
9926 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9927 idx + 1, len, function, trim_filename (file), line);
9928 }
9929
9930 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9931 (dynamically sized) vector. */
9932
9933 void
9934 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9935 const char *function)
9936 {
9937 internal_error
9938 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9939 idx + 1, len, function, trim_filename (file), line);
9940 }
9941
9942 /* Similar to above, except that the check is for the bounds of the operand
9943 vector of an expression node EXP. */
9944
9945 void
9946 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9947 int line, const char *function)
9948 {
9949 enum tree_code code = TREE_CODE (exp);
9950 internal_error
9951 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9952 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9953 function, trim_filename (file), line);
9954 }
9955
9956 /* Similar to above, except that the check is for the number of
9957 operands of an OMP_CLAUSE node. */
9958
9959 void
9960 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9961 int line, const char *function)
9962 {
9963 internal_error
9964 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9965 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9966 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9967 trim_filename (file), line);
9968 }
9969 #endif /* ENABLE_TREE_CHECKING */
9970 \f
9971 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9972 and mapped to the machine mode MODE. Initialize its fields and build
9973 the information necessary for debugging output. */
9974
9975 static tree
9976 make_vector_type (tree innertype, int nunits, machine_mode mode)
9977 {
9978 tree t;
9979 inchash::hash hstate;
9980 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9981
9982 t = make_node (VECTOR_TYPE);
9983 TREE_TYPE (t) = mv_innertype;
9984 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9985 SET_TYPE_MODE (t, mode);
9986
9987 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9988 SET_TYPE_STRUCTURAL_EQUALITY (t);
9989 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9990 || mode != VOIDmode)
9991 && !VECTOR_BOOLEAN_TYPE_P (t))
9992 TYPE_CANONICAL (t)
9993 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9994
9995 layout_type (t);
9996
9997 hstate.add_wide_int (VECTOR_TYPE);
9998 hstate.add_wide_int (nunits);
9999 hstate.add_wide_int (mode);
10000 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
10001 t = type_hash_canon (hstate.end (), t);
10002
10003 /* We have built a main variant, based on the main variant of the
10004 inner type. Use it to build the variant we return. */
10005 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10006 && TREE_TYPE (t) != innertype)
10007 return build_type_attribute_qual_variant (t,
10008 TYPE_ATTRIBUTES (innertype),
10009 TYPE_QUALS (innertype));
10010
10011 return t;
10012 }
10013
10014 static tree
10015 make_or_reuse_type (unsigned size, int unsignedp)
10016 {
10017 int i;
10018
10019 if (size == INT_TYPE_SIZE)
10020 return unsignedp ? unsigned_type_node : integer_type_node;
10021 if (size == CHAR_TYPE_SIZE)
10022 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10023 if (size == SHORT_TYPE_SIZE)
10024 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10025 if (size == LONG_TYPE_SIZE)
10026 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10027 if (size == LONG_LONG_TYPE_SIZE)
10028 return (unsignedp ? long_long_unsigned_type_node
10029 : long_long_integer_type_node);
10030
10031 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10032 if (size == int_n_data[i].bitsize
10033 && int_n_enabled_p[i])
10034 return (unsignedp ? int_n_trees[i].unsigned_type
10035 : int_n_trees[i].signed_type);
10036
10037 if (unsignedp)
10038 return make_unsigned_type (size);
10039 else
10040 return make_signed_type (size);
10041 }
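
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): when the requested width matches one of the standard C
   types, the existing node is returned rather than a fresh one.  */
#if 0
static void
example_make_or_reuse_type (void)
{
  gcc_assert (make_or_reuse_type (INT_TYPE_SIZE, 0) == integer_type_node);
  gcc_assert (make_or_reuse_type (INT_TYPE_SIZE, 1) == unsigned_type_node);
}
#endif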
10042
10043 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10044
10045 static tree
10046 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10047 {
10048 if (satp)
10049 {
10050 if (size == SHORT_FRACT_TYPE_SIZE)
10051 return unsignedp ? sat_unsigned_short_fract_type_node
10052 : sat_short_fract_type_node;
10053 if (size == FRACT_TYPE_SIZE)
10054 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10055 if (size == LONG_FRACT_TYPE_SIZE)
10056 return unsignedp ? sat_unsigned_long_fract_type_node
10057 : sat_long_fract_type_node;
10058 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10059 return unsignedp ? sat_unsigned_long_long_fract_type_node
10060 : sat_long_long_fract_type_node;
10061 }
10062 else
10063 {
10064 if (size == SHORT_FRACT_TYPE_SIZE)
10065 return unsignedp ? unsigned_short_fract_type_node
10066 : short_fract_type_node;
10067 if (size == FRACT_TYPE_SIZE)
10068 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10069 if (size == LONG_FRACT_TYPE_SIZE)
10070 return unsignedp ? unsigned_long_fract_type_node
10071 : long_fract_type_node;
10072 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10073 return unsignedp ? unsigned_long_long_fract_type_node
10074 : long_long_fract_type_node;
10075 }
10076
10077 return make_fract_type (size, unsignedp, satp);
10078 }
10079
10080 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10081
10082 static tree
10083 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10084 {
10085 if (satp)
10086 {
10087 if (size == SHORT_ACCUM_TYPE_SIZE)
10088 return unsignedp ? sat_unsigned_short_accum_type_node
10089 : sat_short_accum_type_node;
10090 if (size == ACCUM_TYPE_SIZE)
10091 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10092 if (size == LONG_ACCUM_TYPE_SIZE)
10093 return unsignedp ? sat_unsigned_long_accum_type_node
10094 : sat_long_accum_type_node;
10095 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10096 return unsignedp ? sat_unsigned_long_long_accum_type_node
10097 : sat_long_long_accum_type_node;
10098 }
10099 else
10100 {
10101 if (size == SHORT_ACCUM_TYPE_SIZE)
10102 return unsignedp ? unsigned_short_accum_type_node
10103 : short_accum_type_node;
10104 if (size == ACCUM_TYPE_SIZE)
10105 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10106 if (size == LONG_ACCUM_TYPE_SIZE)
10107 return unsignedp ? unsigned_long_accum_type_node
10108 : long_accum_type_node;
10109 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10110 return unsignedp ? unsigned_long_long_accum_type_node
10111 : long_long_accum_type_node;
10112 }
10113
10114 return make_accum_type (size, unsignedp, satp);
10115 }
10116
10117
10118 /* Create an atomic variant node for TYPE. This routine is called
10119 during initialization of data types to create the 5 basic atomic
10120 types. The generic build_variant_type function requires these to
10121 already be set up in order to function properly, so it cannot be
10122 called from there. If ALIGN is non-zero, then ensure alignment is
10123 overridden to this value. */
10124
10125 static tree
10126 build_atomic_base (tree type, unsigned int align)
10127 {
10128 tree t;
10129
10130 /* Make sure it's not already registered. */
10131 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10132 return t;
10133
10134 t = build_variant_type_copy (type);
10135 set_type_quals (t, TYPE_QUAL_ATOMIC);
10136
10137 if (align)
10138 SET_TYPE_ALIGN (t, align);
10139
10140 return t;
10141 }
10142
10143 /* Create nodes for all integer types (and error_mark_node) using the sizes
10144 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10145
10146 void
10147 build_common_tree_nodes (bool signed_char)
10148 {
10149 int i;
10150
10151 error_mark_node = make_node (ERROR_MARK);
10152 TREE_TYPE (error_mark_node) = error_mark_node;
10153
10154 initialize_sizetypes ();
10155
10156 /* Define both `signed char' and `unsigned char'. */
10157 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10158 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10159 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10160 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10161
10162 /* Define `char', which is like either `signed char' or `unsigned char'
10163 but not the same as either. */
10164 char_type_node
10165 = (signed_char
10166 ? make_signed_type (CHAR_TYPE_SIZE)
10167 : make_unsigned_type (CHAR_TYPE_SIZE));
10168 TYPE_STRING_FLAG (char_type_node) = 1;
10169
10170 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10171 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10172 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10173 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10174 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10175 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10176 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10177 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10178
10179 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10180 {
10181 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10182 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10183 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10184 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10185
10186 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10187 && int_n_enabled_p[i])
10188 {
10189 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10190 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10191 }
10192 }
10193
10194 /* Define a boolean type. This type only represents boolean values but
10195 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10196 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10197 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10198 TYPE_PRECISION (boolean_type_node) = 1;
10199 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10200
10201 /* Define what type to use for size_t. */
10202 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10203 size_type_node = unsigned_type_node;
10204 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10205 size_type_node = long_unsigned_type_node;
10206 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10207 size_type_node = long_long_unsigned_type_node;
10208 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10209 size_type_node = short_unsigned_type_node;
10210 else
10211 {
10212 int i;
10213
10214 size_type_node = NULL_TREE;
10215 for (i = 0; i < NUM_INT_N_ENTS; i++)
10216 if (int_n_enabled_p[i])
10217 {
10218 char name[50];
10219 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10220
10221 if (strcmp (name, SIZE_TYPE) == 0)
10222 {
10223 size_type_node = int_n_trees[i].unsigned_type;
10224 }
10225 }
10226 if (size_type_node == NULL_TREE)
10227 gcc_unreachable ();
10228 }
10229
10230 /* Fill in the rest of the sized types. Reuse existing type nodes
10231 when possible. */
10232 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10233 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10234 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10235 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10236 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10237
10238 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10239 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10240 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10241 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10242 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10243
10244 /* Don't call build_qualified_type for atomics. That routine does
10245 special processing for atomics, and until they are initialized
10246 it's better not to make that call.
10247
10248 Check to see if there is a target override for atomic types. */
10249
10250 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10251 targetm.atomic_align_for_mode (QImode));
10252 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10253 targetm.atomic_align_for_mode (HImode));
10254 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10255 targetm.atomic_align_for_mode (SImode));
10256 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10257 targetm.atomic_align_for_mode (DImode));
10258 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10259 targetm.atomic_align_for_mode (TImode));
10260
10261 access_public_node = get_identifier ("public");
10262 access_protected_node = get_identifier ("protected");
10263 access_private_node = get_identifier ("private");
10264
10265 /* Define these next since types below may use them. */
10266 integer_zero_node = build_int_cst (integer_type_node, 0);
10267 integer_one_node = build_int_cst (integer_type_node, 1);
10268 integer_three_node = build_int_cst (integer_type_node, 3);
10269 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10270
10271 size_zero_node = size_int (0);
10272 size_one_node = size_int (1);
10273 bitsize_zero_node = bitsize_int (0);
10274 bitsize_one_node = bitsize_int (1);
10275 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10276
10277 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10278 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10279
10280 void_type_node = make_node (VOID_TYPE);
10281 layout_type (void_type_node);
10282
10283 pointer_bounds_type_node = targetm.chkp_bound_type ();
10284
10285 /* We are not going to have real types in C with less than byte alignment,
10286 so we might as well not have any types that claim to have it. */
10287 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10288 TYPE_USER_ALIGN (void_type_node) = 0;
10289
10290 void_node = make_node (VOID_CST);
10291 TREE_TYPE (void_node) = void_type_node;
10292
10293 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10294 layout_type (TREE_TYPE (null_pointer_node));
10295
10296 ptr_type_node = build_pointer_type (void_type_node);
10297 const_ptr_type_node
10298 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10299 fileptr_type_node = ptr_type_node;
10300
10301 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10302
10303 float_type_node = make_node (REAL_TYPE);
10304 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10305 layout_type (float_type_node);
10306
10307 double_type_node = make_node (REAL_TYPE);
10308 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10309 layout_type (double_type_node);
10310
10311 long_double_type_node = make_node (REAL_TYPE);
10312 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10313 layout_type (long_double_type_node);
10314
10315 float_ptr_type_node = build_pointer_type (float_type_node);
10316 double_ptr_type_node = build_pointer_type (double_type_node);
10317 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10318 integer_ptr_type_node = build_pointer_type (integer_type_node);
10319
10320 /* Fixed size integer types. */
10321 uint16_type_node = make_or_reuse_type (16, 1);
10322 uint32_type_node = make_or_reuse_type (32, 1);
10323 uint64_type_node = make_or_reuse_type (64, 1);
10324
10325 /* Decimal float types. */
10326 dfloat32_type_node = make_node (REAL_TYPE);
10327 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10328 layout_type (dfloat32_type_node);
10329 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10330 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10331
10332 dfloat64_type_node = make_node (REAL_TYPE);
10333 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10334 layout_type (dfloat64_type_node);
10335 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10336 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10337
10338 dfloat128_type_node = make_node (REAL_TYPE);
10339 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10340 layout_type (dfloat128_type_node);
10341 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10342 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10343
10344 complex_integer_type_node = build_complex_type (integer_type_node);
10345 complex_float_type_node = build_complex_type (float_type_node);
10346 complex_double_type_node = build_complex_type (double_type_node);
10347 complex_long_double_type_node = build_complex_type (long_double_type_node);
10348
10349 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10350 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10351 sat_ ## KIND ## _type_node = \
10352 make_sat_signed_ ## KIND ## _type (SIZE); \
10353 sat_unsigned_ ## KIND ## _type_node = \
10354 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10355 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10356 unsigned_ ## KIND ## _type_node = \
10357 make_unsigned_ ## KIND ## _type (SIZE);
10358
10359 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10360 sat_ ## WIDTH ## KIND ## _type_node = \
10361 make_sat_signed_ ## KIND ## _type (SIZE); \
10362 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10363 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10364 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10365 unsigned_ ## WIDTH ## KIND ## _type_node = \
10366 make_unsigned_ ## KIND ## _type (SIZE);
10367
10368 /* Make fixed-point type nodes based on four different widths. */
10369 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10370 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10371 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10372 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10373 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10374
10375 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10376 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10377 NAME ## _type_node = \
10378 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10379 u ## NAME ## _type_node = \
10380 make_or_reuse_unsigned_ ## KIND ## _type \
10381 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10382 sat_ ## NAME ## _type_node = \
10383 make_or_reuse_sat_signed_ ## KIND ## _type \
10384 (GET_MODE_BITSIZE (MODE ## mode)); \
10385 sat_u ## NAME ## _type_node = \
10386 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10387 (GET_MODE_BITSIZE (U ## MODE ## mode));
10388
10389 /* Fixed-point type and mode nodes. */
10390 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10391 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10392 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10393 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10394 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10395 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10396 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10397 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10398 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10399 MAKE_FIXED_MODE_NODE (accum, da, DA)
10400 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10401
10402 {
10403 tree t = targetm.build_builtin_va_list ();
10404
10405 /* Many back-ends define record types without setting TYPE_NAME.
10406 If we copied the record type here, we'd keep the original
10407 record type without a name. This breaks name mangling. So,
10408 don't copy record types and let c_common_nodes_and_builtins()
10409 declare the type to be __builtin_va_list. */
10410 if (TREE_CODE (t) != RECORD_TYPE)
10411 t = build_variant_type_copy (t);
10412
10413 va_list_type_node = t;
10414 }
10415 }
10416
10417 /* Modify DECL for given flags.
10418 TM_PURE attribute is set only on types, so the function will modify
10419 DECL's type when ECF_TM_PURE is used. */
10420
10421 void
10422 set_call_expr_flags (tree decl, int flags)
10423 {
10424 if (flags & ECF_NOTHROW)
10425 TREE_NOTHROW (decl) = 1;
10426 if (flags & ECF_CONST)
10427 TREE_READONLY (decl) = 1;
10428 if (flags & ECF_PURE)
10429 DECL_PURE_P (decl) = 1;
10430 if (flags & ECF_LOOPING_CONST_OR_PURE)
10431 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10432 if (flags & ECF_NOVOPS)
10433 DECL_IS_NOVOPS (decl) = 1;
10434 if (flags & ECF_NORETURN)
10435 TREE_THIS_VOLATILE (decl) = 1;
10436 if (flags & ECF_MALLOC)
10437 DECL_IS_MALLOC (decl) = 1;
10438 if (flags & ECF_RETURNS_TWICE)
10439 DECL_IS_RETURNS_TWICE (decl) = 1;
10440 if (flags & ECF_LEAF)
10441 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10442 NULL, DECL_ATTRIBUTES (decl));
10443 if (flags & ECF_RET1)
10444 DECL_ATTRIBUTES (decl)
10445 = tree_cons (get_identifier ("fn spec"),
10446 build_tree_list (NULL_TREE, build_string (1, "1")),
10447 DECL_ATTRIBUTES (decl));
10448 if ((flags & ECF_TM_PURE) && flag_tm)
10449 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10450 /* Looping const or pure is implied by noreturn.
10451 There is currently no way to declare looping const or looping pure alone. */
10452 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10453 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10454 }
10455
10456
10457 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10458
10459 static void
10460 local_define_builtin (const char *name, tree type, enum built_in_function code,
10461 const char *library_name, int ecf_flags)
10462 {
10463 tree decl;
10464
10465 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10466 library_name, NULL_TREE);
10467 set_call_expr_flags (decl, ecf_flags);
10468
10469 set_builtin_decl (code, decl, true);
10470 }
10471
10472 /* Call this function after instantiating all builtins that the language
10473 front end cares about. This will build the rest of the builtins
10474 and internal functions that are relied upon by the tree optimizers and
10475 the middle-end. */
10476
10477 void
10478 build_common_builtin_nodes (void)
10479 {
10480 tree tmp, ftype;
10481 int ecf_flags;
10482
10483 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10484 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10485 {
10486 ftype = build_function_type (void_type_node, void_list_node);
10487 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10488 local_define_builtin ("__builtin_unreachable", ftype,
10489 BUILT_IN_UNREACHABLE,
10490 "__builtin_unreachable",
10491 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10492 | ECF_CONST);
10493 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10494 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10495 "abort",
10496 ECF_LEAF | ECF_NORETURN | ECF_CONST);
10497 }
10498
10499 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10500 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10501 {
10502 ftype = build_function_type_list (ptr_type_node,
10503 ptr_type_node, const_ptr_type_node,
10504 size_type_node, NULL_TREE);
10505
10506 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10507 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10508 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10509 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10510 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10511 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10512 }
10513
10514 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10515 {
10516 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10517 const_ptr_type_node, size_type_node,
10518 NULL_TREE);
10519 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10520 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10521 }
10522
10523 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10524 {
10525 ftype = build_function_type_list (ptr_type_node,
10526 ptr_type_node, integer_type_node,
10527 size_type_node, NULL_TREE);
10528 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10529 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10530 }
10531
10532 /* If we're checking the stack, `alloca' can throw. */
10533 const int alloca_flags
10534 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10535
10536 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10537 {
10538 ftype = build_function_type_list (ptr_type_node,
10539 size_type_node, NULL_TREE);
10540 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10541 "alloca", alloca_flags);
10542 }
10543
10544 ftype = build_function_type_list (ptr_type_node, size_type_node,
10545 size_type_node, NULL_TREE);
10546 local_define_builtin ("__builtin_alloca_with_align", ftype,
10547 BUILT_IN_ALLOCA_WITH_ALIGN,
10548 "__builtin_alloca_with_align",
10549 alloca_flags);
10550
10551 ftype = build_function_type_list (void_type_node,
10552 ptr_type_node, ptr_type_node,
10553 ptr_type_node, NULL_TREE);
10554 local_define_builtin ("__builtin_init_trampoline", ftype,
10555 BUILT_IN_INIT_TRAMPOLINE,
10556 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10557 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10558 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10559 "__builtin_init_heap_trampoline",
10560 ECF_NOTHROW | ECF_LEAF);
10561
10562 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10563 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10564 BUILT_IN_ADJUST_TRAMPOLINE,
10565 "__builtin_adjust_trampoline",
10566 ECF_CONST | ECF_NOTHROW);
10567
10568 ftype = build_function_type_list (void_type_node,
10569 ptr_type_node, ptr_type_node, NULL_TREE);
10570 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10571 BUILT_IN_NONLOCAL_GOTO,
10572 "__builtin_nonlocal_goto",
10573 ECF_NORETURN | ECF_NOTHROW);
10574
10575 ftype = build_function_type_list (void_type_node,
10576 ptr_type_node, ptr_type_node, NULL_TREE);
10577 local_define_builtin ("__builtin_setjmp_setup", ftype,
10578 BUILT_IN_SETJMP_SETUP,
10579 "__builtin_setjmp_setup", ECF_NOTHROW);
10580
10581 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10582 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10583 BUILT_IN_SETJMP_RECEIVER,
10584 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10585
10586 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10587 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10588 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10589
10590 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10591 local_define_builtin ("__builtin_stack_restore", ftype,
10592 BUILT_IN_STACK_RESTORE,
10593 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10594
10595 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10596 const_ptr_type_node, size_type_node,
10597 NULL_TREE);
10598 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10599 "__builtin_memcmp_eq",
10600 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10601
10602 /* If there's a possibility that we might use the ARM EABI, build the
10603 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10604 if (targetm.arm_eabi_unwinder)
10605 {
10606 ftype = build_function_type_list (void_type_node, NULL_TREE);
10607 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10608 BUILT_IN_CXA_END_CLEANUP,
10609 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10610 }
10611
10612 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10613 local_define_builtin ("__builtin_unwind_resume", ftype,
10614 BUILT_IN_UNWIND_RESUME,
10615 ((targetm_common.except_unwind_info (&global_options)
10616 == UI_SJLJ)
10617 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10618 ECF_NORETURN);
10619
10620 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10621 {
10622 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10623 NULL_TREE);
10624 local_define_builtin ("__builtin_return_address", ftype,
10625 BUILT_IN_RETURN_ADDRESS,
10626 "__builtin_return_address",
10627 ECF_NOTHROW);
10628 }
10629
10630 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10631 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10632 {
10633 ftype = build_function_type_list (void_type_node, ptr_type_node,
10634 ptr_type_node, NULL_TREE);
10635 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10636 local_define_builtin ("__cyg_profile_func_enter", ftype,
10637 BUILT_IN_PROFILE_FUNC_ENTER,
10638 "__cyg_profile_func_enter", 0);
10639 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10640 local_define_builtin ("__cyg_profile_func_exit", ftype,
10641 BUILT_IN_PROFILE_FUNC_EXIT,
10642 "__cyg_profile_func_exit", 0);
10643 }
10644
10645 /* The exception object and filter values from the runtime. The argument
10646 must be zero before exception lowering, i.e. from the front end. After
10647 exception lowering, it will be the region number for the exception
10648 landing pad. These functions are PURE instead of CONST to prevent
10649 them from being hoisted past the exception edge that will initialize
10650 its value in the landing pad. */
10651 ftype = build_function_type_list (ptr_type_node,
10652 integer_type_node, NULL_TREE);
10653 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10654 /* Only use TM_PURE if we have TM language support. */
10655 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10656 ecf_flags |= ECF_TM_PURE;
10657 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10658 "__builtin_eh_pointer", ecf_flags);
10659
10660 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10661 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10662 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10663 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10664
10665 ftype = build_function_type_list (void_type_node,
10666 integer_type_node, integer_type_node,
10667 NULL_TREE);
10668 local_define_builtin ("__builtin_eh_copy_values", ftype,
10669 BUILT_IN_EH_COPY_VALUES,
10670 "__builtin_eh_copy_values", ECF_NOTHROW);
10671
10672 /* Complex multiplication and division. These are handled as builtins
10673 rather than optabs because emit_library_call_value doesn't support
10674 complex. Further, we can do slightly better with folding these
10675 beasties if the real and imaginary parts of the arguments are separate. */
10676 {
10677 int mode;
10678
10679 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10680 {
10681 char mode_name_buf[4], *q;
10682 const char *p;
10683 enum built_in_function mcode, dcode;
10684 tree type, inner_type;
10685 const char *prefix = "__";
10686
10687 if (targetm.libfunc_gnu_prefix)
10688 prefix = "__gnu_";
10689
10690 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10691 if (type == NULL)
10692 continue;
10693 inner_type = TREE_TYPE (type);
10694
10695 ftype = build_function_type_list (type, inner_type, inner_type,
10696 inner_type, inner_type, NULL_TREE);
10697
10698 mcode = ((enum built_in_function)
10699 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10700 dcode = ((enum built_in_function)
10701 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10702
10703 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10704 *q = TOLOWER (*p);
10705 *q = '\0';
10706
10707 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10708 NULL);
10709 local_define_builtin (built_in_names[mcode], ftype, mcode,
10710 built_in_names[mcode],
10711 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10712
10713 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10714 NULL);
10715 local_define_builtin (built_in_names[dcode], ftype, dcode,
10716 built_in_names[dcode],
10717 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10718 }
10719 }
10720
10721 init_internal_fns ();
10722 }
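
/* Editor's illustrative sketch (not part of GCC): for SCmode the loop above
   registers builtins named "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" /
   "__gnu_divsc3" when targetm.libfunc_gnu_prefix is set), matching libgcc
   routines with prototypes of the form shown below.  */
#if 0
_Complex float __mulsc3 (float a, float b, float c, float d);
_Complex float __divsc3 (float a, float b, float c, float d);
#endif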
10723
10724 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10725 better way.
10726
10727 If we requested a pointer to a vector, build up the pointers that
10728 we stripped off while looking for the inner type. Similarly for
10729 return values from functions.
10730
10731 The argument TYPE is the top of the chain, and BOTTOM is the
10732 new type which we will point to. */
10733
10734 tree
10735 reconstruct_complex_type (tree type, tree bottom)
10736 {
10737 tree inner, outer;
10738
10739 if (TREE_CODE (type) == POINTER_TYPE)
10740 {
10741 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10742 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10743 TYPE_REF_CAN_ALIAS_ALL (type));
10744 }
10745 else if (TREE_CODE (type) == REFERENCE_TYPE)
10746 {
10747 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10748 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10749 TYPE_REF_CAN_ALIAS_ALL (type));
10750 }
10751 else if (TREE_CODE (type) == ARRAY_TYPE)
10752 {
10753 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10754 outer = build_array_type (inner, TYPE_DOMAIN (type));
10755 }
10756 else if (TREE_CODE (type) == FUNCTION_TYPE)
10757 {
10758 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10759 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10760 }
10761 else if (TREE_CODE (type) == METHOD_TYPE)
10762 {
10763 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10764 /* The build_method_type_directly () routine prepends 'this' to the
10765 argument list, so we must compensate by dropping it here. */
10766 outer
10767 = build_method_type_directly
10768 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10769 inner,
10770 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10771 }
10772 else if (TREE_CODE (type) == OFFSET_TYPE)
10773 {
10774 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10775 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10776 }
10777 else
10778 return bottom;
10779
10780 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10781 TYPE_QUALS (type));
10782 }
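
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): given TYPE == "float *" and BOTTOM == a 4-element float
   vector type, the pointer layer of TYPE is rebuilt on top of BOTTOM, so the
   result is a pointer to that vector type.  */
#if 0
static tree
example_reconstruct_vector_pointer (void)
{
  tree vec = build_vector_type (float_type_node, 4);
  return reconstruct_complex_type (float_ptr_type_node, vec);
}
#endif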
10783
10784 /* Returns a vector tree node given a mode (an integer mode or a vector
10785 mode) and the inner type. */
10786 tree
10787 build_vector_type_for_mode (tree innertype, machine_mode mode)
10788 {
10789 int nunits;
10790
10791 switch (GET_MODE_CLASS (mode))
10792 {
10793 case MODE_VECTOR_INT:
10794 case MODE_VECTOR_FLOAT:
10795 case MODE_VECTOR_FRACT:
10796 case MODE_VECTOR_UFRACT:
10797 case MODE_VECTOR_ACCUM:
10798 case MODE_VECTOR_UACCUM:
10799 nunits = GET_MODE_NUNITS (mode);
10800 break;
10801
10802 case MODE_INT:
10803 /* Check that there are no leftover bits. */
10804 gcc_assert (GET_MODE_BITSIZE (mode)
10805 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10806
10807 nunits = GET_MODE_BITSIZE (mode)
10808 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10809 break;
10810
10811 default:
10812 gcc_unreachable ();
10813 }
10814
10815 return make_vector_type (innertype, nunits, mode);
10816 }
10817
10818 /* Similarly, but takes the inner type and number of units, which must be
10819 a power of two. */
10820
10821 tree
10822 build_vector_type (tree innertype, int nunits)
10823 {
10824 return make_vector_type (innertype, nunits, VOIDmode);
10825 }
10826
10827 /* Build a truth vector type with NUNITS units for a vector of VECTOR_SIZE bytes. */
10828
10829 tree
10830 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10831 {
10832 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10833 vector_size);
10834
10835 gcc_assert (mask_mode != VOIDmode);
10836
10837 unsigned HOST_WIDE_INT vsize;
10838 if (mask_mode == BLKmode)
10839 vsize = vector_size * BITS_PER_UNIT;
10840 else
10841 vsize = GET_MODE_BITSIZE (mask_mode);
10842
10843 unsigned HOST_WIDE_INT esize = vsize / nunits;
10844 gcc_assert (esize * nunits == vsize);
10845
10846 tree bool_type = build_nonstandard_boolean_type (esize);
10847
10848 return make_vector_type (bool_type, nunits, mask_mode);
10849 }
10850
10851 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10852
10853 tree
10854 build_same_sized_truth_vector_type (tree vectype)
10855 {
10856 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10857 return vectype;
10858
10859 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10860
10861 if (!size)
10862 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10863
10864 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10865 }
10866
10867 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10868
10869 tree
10870 build_opaque_vector_type (tree innertype, int nunits)
10871 {
10872 tree t = make_vector_type (innertype, nunits, VOIDmode);
10873 tree cand;
10874 /* We always build the non-opaque variant before the opaque one,
10875 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10876 cand = TYPE_NEXT_VARIANT (t);
10877 if (cand
10878 && TYPE_VECTOR_OPAQUE (cand)
10879 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10880 return cand;
10881 /* Otherwise build a variant type and make sure to queue it after
10882 the non-opaque type. */
10883 cand = build_distinct_type_copy (t);
10884 TYPE_VECTOR_OPAQUE (cand) = true;
10885 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10886 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10887 TYPE_NEXT_VARIANT (t) = cand;
10888 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10889 return cand;
10890 }
10891
10892
10893 /* Given an initializer INIT, return TRUE if INIT is zero or some
10894 aggregate of zeros. Otherwise return FALSE. */
10895 bool
10896 initializer_zerop (const_tree init)
10897 {
10898 tree elt;
10899
10900 STRIP_NOPS (init);
10901
10902 switch (TREE_CODE (init))
10903 {
10904 case INTEGER_CST:
10905 return integer_zerop (init);
10906
10907 case REAL_CST:
10908 /* ??? Note that this is not correct for C4X float formats. There,
10909 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10910 negative exponent. */
10911 return real_zerop (init)
10912 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10913
10914 case FIXED_CST:
10915 return fixed_zerop (init);
10916
10917 case COMPLEX_CST:
10918 return integer_zerop (init)
10919 || (real_zerop (init)
10920 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10921 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10922
10923 case VECTOR_CST:
10924 {
10925 unsigned i;
10926 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10927 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10928 return false;
10929 return true;
10930 }
10931
10932 case CONSTRUCTOR:
10933 {
10934 unsigned HOST_WIDE_INT idx;
10935
10936 if (TREE_CLOBBER_P (init))
10937 return false;
10938 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10939 if (!initializer_zerop (elt))
10940 return false;
10941 return true;
10942 }
10943
10944 case STRING_CST:
10945 {
10946 int i;
10947
10948 /* We need to loop through all elements to handle cases like
10949 "\0" and "\0foobar". */
10950 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10951 if (TREE_STRING_POINTER (init)[i] != '\0')
10952 return false;
10953
10954 return true;
10955 }
10956
10957 default:
10958 return false;
10959 }
10960 }
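
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): a plain zero INTEGER_CST and an all-zero STRING_CST both
   count as zero initializers; as noted above, a REAL_CST of -0.0 would
   not.  */
#if 0
static void
example_initializer_zerop (void)
{
  gcc_assert (initializer_zerop (build_int_cst (integer_type_node, 0)));
  gcc_assert (initializer_zerop (build_string (2, "\0")));
}
#endif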
10961
10962 /* Check whether vector VEC consists entirely of equal elements and
10963 that the number of elements matches the type of VEC.
10964 Return the first element of the vector,
10965 or NULL_TREE if the vector is not uniform. */
10966 tree
10967 uniform_vector_p (const_tree vec)
10968 {
10969 tree first, t;
10970 unsigned i;
10971
10972 if (vec == NULL_TREE)
10973 return NULL_TREE;
10974
10975 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10976
10977 if (TREE_CODE (vec) == VECTOR_CST)
10978 {
10979 first = VECTOR_CST_ELT (vec, 0);
10980 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10981 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10982 return NULL_TREE;
10983
10984 return first;
10985 }
10986
10987 else if (TREE_CODE (vec) == CONSTRUCTOR)
10988 {
10989 first = error_mark_node;
10990
10991 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10992 {
10993 if (i == 0)
10994 {
10995 first = t;
10996 continue;
10997 }
10998 if (!operand_equal_p (first, t, 0))
10999 return NULL_TREE;
11000 }
11001 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
11002 return NULL_TREE;
11003
11004 return first;
11005 }
11006
11007 return NULL_TREE;
11008 }
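
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): a VECTOR_CST whose elements are all the same constant is
   uniform, and that shared element is what gets returned.  */
#if 0
static void
example_uniform_vector_p (void)
{
  tree vectype = build_vector_type (integer_type_node, 4);
  tree elt = build_int_cst (integer_type_node, 7);
  tree vec = build_vector_from_val (vectype, elt);
  gcc_assert (operand_equal_p (uniform_vector_p (vec), elt, 0));
}
#endif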
11009
11010 /* Build an empty statement at location LOC. */
11011
11012 tree
11013 build_empty_stmt (location_t loc)
11014 {
11015 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11016 SET_EXPR_LOCATION (t, loc);
11017 return t;
11018 }
11019
11020
11021 /* Build an OpenMP clause with code CODE. LOC is the location of the
11022 clause. */
11023
11024 tree
11025 build_omp_clause (location_t loc, enum omp_clause_code code)
11026 {
11027 tree t;
11028 int size, length;
11029
11030 length = omp_clause_num_ops[code];
11031 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11032
11033 record_node_allocation_statistics (OMP_CLAUSE, size);
11034
11035 t = (tree) ggc_internal_alloc (size);
11036 memset (t, 0, size);
11037 TREE_SET_CODE (t, OMP_CLAUSE);
11038 OMP_CLAUSE_SET_CODE (t, code);
11039 OMP_CLAUSE_LOCATION (t) = loc;
11040
11041 return t;
11042 }
11043
11044 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11045 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11046 Except for the CODE and operand count field, other storage for the
11047 object is initialized to zeros. */
11048
11049 tree
11050 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
11051 {
11052 tree t;
11053 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11054
11055 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11056 gcc_assert (len >= 1);
11057
11058 record_node_allocation_statistics (code, length);
11059
11060 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11061
11062 TREE_SET_CODE (t, code);
11063
11064 /* Can't use TREE_OPERAND to store the length because if checking is
11065 enabled, it will try to check the length before we store it. :-P */
11066 t->exp.operands[0] = build_int_cst (sizetype, len);
11067
11068 return t;
11069 }
11070
11071 /* Helper function for build_call_* functions; build a CALL_EXPR with
11072 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11073 the argument slots. */
11074
11075 static tree
11076 build_call_1 (tree return_type, tree fn, int nargs)
11077 {
11078 tree t;
11079
11080 t = build_vl_exp (CALL_EXPR, nargs + 3);
11081 TREE_TYPE (t) = return_type;
11082 CALL_EXPR_FN (t) = fn;
11083 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11084
11085 return t;
11086 }
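
/* Editor's illustrative sketch (not part of GCC; the example_* helper is
   hypothetical): the "+ 3" above accounts for the three fixed CALL_EXPR
   slots -- operand 0 holds the operand count, operand 1 the callee, and
   operand 2 the static chain -- so the NARGS argument slots start at
   operand 3, which is the offset CALL_EXPR_ARG applies.  */
#if 0
static tree
example_first_argument (tree call)
{
  /* Equivalent to TREE_OPERAND (call, 3) for a CALL_EXPR.  */
  return CALL_EXPR_ARG (call, 0);
}
#endif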
11087
11088 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11089 FN and a null static chain slot. NARGS is the number of call arguments
11090 which are specified as "..." arguments. */
11091
11092 tree
11093 build_call_nary (tree return_type, tree fn, int nargs, ...)
11094 {
11095 tree ret;
11096 va_list args;
11097 va_start (args, nargs);
11098 ret = build_call_valist (return_type, fn, nargs, args);
11099 va_end (args);
11100 return ret;
11101 }
11102
11103 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11104 FN and a null static chain slot. NARGS is the number of call arguments
11105 which are specified as a va_list ARGS. */
11106
11107 tree
11108 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11109 {
11110 tree t;
11111 int i;
11112
11113 t = build_call_1 (return_type, fn, nargs);
11114 for (i = 0; i < nargs; i++)
11115 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11116 process_call_operands (t);
11117 return t;
11118 }
11119
11120 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11121 FN and a null static chain slot. NARGS is the number of call arguments
11122 which are specified as a tree array ARGS. */
11123
11124 tree
11125 build_call_array_loc (location_t loc, tree return_type, tree fn,
11126 int nargs, const tree *args)
11127 {
11128 tree t;
11129 int i;
11130
11131 t = build_call_1 (return_type, fn, nargs);
11132 for (i = 0; i < nargs; i++)
11133 CALL_EXPR_ARG (t, i) = args[i];
11134 process_call_operands (t);
11135 SET_EXPR_LOCATION (t, loc);
11136 return t;
11137 }
11138
11139 /* Like build_call_array, but takes a vec. */
11140
11141 tree
11142 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11143 {
11144 tree ret, t;
11145 unsigned int ix;
11146
11147 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11148 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11149 CALL_EXPR_ARG (ret, ix) = t;
11150 process_call_operands (ret);
11151 return ret;
11152 }
11153
11154 /* Conveniently construct a function call expression. FNDECL names the
11155 function to be called and N arguments are passed in the array
11156 ARGARRAY. */
11157
11158 tree
11159 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11160 {
11161 tree fntype = TREE_TYPE (fndecl);
11162 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11163
11164 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11165 }
11166
11167 /* Conveniently construct a function call expression. FNDECL names the
11168 function to be called and the arguments are passed in the vector
11169 VEC. */
11170
11171 tree
11172 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11173 {
11174 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11175 vec_safe_address (vec));
11176 }
11177
11178
11179 /* Conveniently construct a function call expression. FNDECL names the
11180 function to be called, N is the number of arguments, and the "..."
11181 parameters are the argument expressions. */
11182
11183 tree
11184 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11185 {
11186 va_list ap;
11187 tree *argarray = XALLOCAVEC (tree, n);
11188 int i;
11189
11190 va_start (ap, n);
11191 for (i = 0; i < n; i++)
11192 argarray[i] = va_arg (ap, tree);
11193 va_end (ap);
11194 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11195 }
11196
11197 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11198 varargs macros aren't supported by all bootstrap compilers. */
11199
11200 tree
11201 build_call_expr (tree fndecl, int n, ...)
11202 {
11203 va_list ap;
11204 tree *argarray = XALLOCAVEC (tree, n);
11205 int i;
11206
11207 va_start (ap, n);
11208 for (i = 0; i < n; i++)
11209 argarray[i] = va_arg (ap, tree);
11210 va_end (ap);
11211 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11212 }
11213
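/* Illustrative sketch, not part of GCC: building a three-argument call
   with the varargs interface above.  DST, SRC and NBYTES are assumed to
   be trees of suitable pointer and size types supplied by the caller;
   builtin_decl_explicit may return NULL_TREE if the builtin is not
   available, so a real caller would check for that.  */

static tree
example_build_memcpy_call (tree dst, tree src, tree nbytes)
{
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  return build_call_expr (fndecl, 3, dst, src, nbytes);
}
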
11214 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11215 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11216 It will get gimplified later into an ordinary internal function. */
11217
11218 tree
11219 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11220 tree type, int n, const tree *args)
11221 {
11222 tree t = build_call_1 (type, NULL_TREE, n);
11223 for (int i = 0; i < n; ++i)
11224 CALL_EXPR_ARG (t, i) = args[i];
11225 SET_EXPR_LOCATION (t, loc);
11226 CALL_EXPR_IFN (t) = ifn;
11227 return t;
11228 }
11229
11230 /* Build an internal call expression.  This is just like CALL_EXPR, except
11231 its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
11232 internal function.  */
11233
11234 tree
11235 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11236 tree type, int n, ...)
11237 {
11238 va_list ap;
11239 tree *argarray = XALLOCAVEC (tree, n);
11240 int i;
11241
11242 va_start (ap, n);
11243 for (i = 0; i < n; i++)
11244 argarray[i] = va_arg (ap, tree);
11245 va_end (ap);
11246 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11247 }
11248
11249 /* Return a function call to FN, if the target is guaranteed to support it,
11250 or null otherwise.
11251
11252 N is the number of arguments, passed in the "...", and TYPE is the
11253 type of the return value. */
11254
11255 tree
11256 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11257 int n, ...)
11258 {
11259 va_list ap;
11260 tree *argarray = XALLOCAVEC (tree, n);
11261 int i;
11262
11263 va_start (ap, n);
11264 for (i = 0; i < n; i++)
11265 argarray[i] = va_arg (ap, tree);
11266 va_end (ap);
11267 if (internal_fn_p (fn))
11268 {
11269 internal_fn ifn = as_internal_fn (fn);
11270 if (direct_internal_fn_p (ifn))
11271 {
11272 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11273 if (!direct_internal_fn_supported_p (ifn, types,
11274 OPTIMIZE_FOR_BOTH))
11275 return NULL_TREE;
11276 }
11277 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11278 }
11279 else
11280 {
11281 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11282 if (!fndecl)
11283 return NULL_TREE;
11284 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11285 }
11286 }
11287
11288 /* Create a new constant string literal and return a char* pointer to it.
11289 The STRING_CST value is the LEN characters at STR. */
11290 tree
11291 build_string_literal (int len, const char *str)
11292 {
11293 tree t, elem, index, type;
11294
11295 t = build_string (len, str);
11296 elem = build_type_variant (char_type_node, 1, 0);
11297 index = build_index_type (size_int (len - 1));
11298 type = build_array_type (elem, index);
11299 TREE_TYPE (t) = type;
11300 TREE_CONSTANT (t) = 1;
11301 TREE_READONLY (t) = 1;
11302 TREE_STATIC (t) = 1;
11303
11304 type = build_pointer_type (elem);
11305 t = build1 (ADDR_EXPR, type,
11306 build4 (ARRAY_REF, elem,
11307 t, integer_zero_node, NULL_TREE, NULL_TREE));
11308 return t;
11309 }
11310
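/* Illustrative sketch, not part of GCC: the ADDR_EXPR returned by
   build_string_literal already has pointer type, so it can be passed
   directly as a call argument.  LEN counts the terminating NUL, hence
   sizeof rather than strlen here.  */

static tree
example_call_puts (void)
{
  tree msg = build_string_literal (sizeof ("hello"), "hello");
  return build_call_expr (builtin_decl_explicit (BUILT_IN_PUTS), 1, msg);
}
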
11311
11312
11313 /* Return true if T (assumed to be a DECL) must be assigned a memory
11314 location. */
11315
11316 bool
11317 needs_to_live_in_memory (const_tree t)
11318 {
11319 return (TREE_ADDRESSABLE (t)
11320 || is_global_var (t)
11321 || (TREE_CODE (t) == RESULT_DECL
11322 && !DECL_BY_REFERENCE (t)
11323 && aggregate_value_p (t, current_function_decl)));
11324 }
11325
11326 /* Return the value of the integer constant X, sign-extended. */
11327
11328 HOST_WIDE_INT
11329 int_cst_value (const_tree x)
11330 {
11331 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11332 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11333
11334 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11335 gcc_assert (cst_and_fits_in_hwi (x));
11336
11337 if (bits < HOST_BITS_PER_WIDE_INT)
11338 {
11339 bool negative = ((val >> (bits - 1)) & 1) != 0;
11340 if (negative)
11341 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11342 else
11343 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11344 }
11345
11346 return val;
11347 }
11348
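/* Illustrative sketch, not part of GCC: the same sign-extension step as
   in int_cst_value, shown on a plain value.  With BITS == 8, VAL == 0xff
   yields -1 and VAL == 0x7f stays 127.  */

static HOST_WIDE_INT
example_sign_extend (unsigned HOST_WIDE_INT val, unsigned bits)
{
  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	/* Set all bits above the sign bit.  */
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	/* Clear all bits above the sign bit.  */
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }
  return val;
}
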
11349 /* If TYPE is an integral or pointer type, return an integer type with
11350 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11351 if TYPE is already an integer type of signedness UNSIGNEDP. */
11352
11353 tree
11354 signed_or_unsigned_type_for (int unsignedp, tree type)
11355 {
11356 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11357 return type;
11358
11359 if (TREE_CODE (type) == VECTOR_TYPE)
11360 {
11361 tree inner = TREE_TYPE (type);
11362 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11363 if (!inner2)
11364 return NULL_TREE;
11365 if (inner == inner2)
11366 return type;
11367 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11368 }
11369
11370 if (!INTEGRAL_TYPE_P (type)
11371 && !POINTER_TYPE_P (type)
11372 && TREE_CODE (type) != OFFSET_TYPE)
11373 return NULL_TREE;
11374
11375 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11376 }
11377
11378 /* If TYPE is an integral or pointer type, return an integer type with
11379 the same precision which is unsigned, or itself if TYPE is already an
11380 unsigned integer type. */
11381
11382 tree
11383 unsigned_type_for (tree type)
11384 {
11385 return signed_or_unsigned_type_for (1, type);
11386 }
11387
11388 /* If TYPE is an integral or pointer type, return an integer type with
11389 the same precision which is signed, or itself if TYPE is already a
11390 signed integer type. */
11391
11392 tree
11393 signed_type_for (tree type)
11394 {
11395 return signed_or_unsigned_type_for (0, type);
11396 }
11397
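/* Illustrative sketch, not part of GCC: a typical use of
   unsigned_type_for, rewriting an operand into the unsigned type of the
   same precision so that arithmetic on it cannot overflow in an
   undefined way.  OP is assumed to be an expression of integral type.  */

static tree
example_force_unsigned (tree op)
{
  tree utype = unsigned_type_for (TREE_TYPE (op));
  if (!utype || utype == TREE_TYPE (op))
    return op;
  return fold_convert (utype, op);
}
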
11398 /* If TYPE is a vector type, return the corresponding boolean vector type
11399 with the same number of subparts. Otherwise return boolean_type_node. */
11400
11401 tree
11402 truth_type_for (tree type)
11403 {
11404 if (TREE_CODE (type) == VECTOR_TYPE)
11405 {
11406 if (VECTOR_BOOLEAN_TYPE_P (type))
11407 return type;
11408 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11409 GET_MODE_SIZE (TYPE_MODE (type)));
11410 }
11411 else
11412 return boolean_type_node;
11413 }
11414
11415 /* Returns the largest value obtainable by casting something in INNER type to
11416 OUTER type. */
11417
11418 tree
11419 upper_bound_in_type (tree outer, tree inner)
11420 {
11421 unsigned int det = 0;
11422 unsigned oprec = TYPE_PRECISION (outer);
11423 unsigned iprec = TYPE_PRECISION (inner);
11424 unsigned prec;
11425
11426 /* Compute a unique number for every combination. */
11427 det |= (oprec > iprec) ? 4 : 0;
11428 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11429 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11430
11431 /* Determine the exponent to use. */
11432 switch (det)
11433 {
11434 case 0:
11435 case 1:
11436 /* oprec <= iprec, outer: signed, inner: don't care. */
11437 prec = oprec - 1;
11438 break;
11439 case 2:
11440 case 3:
11441 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11442 prec = oprec;
11443 break;
11444 case 4:
11445 /* oprec > iprec, outer: signed, inner: signed. */
11446 prec = iprec - 1;
11447 break;
11448 case 5:
11449 /* oprec > iprec, outer: signed, inner: unsigned. */
11450 prec = iprec;
11451 break;
11452 case 6:
11453 /* oprec > iprec, outer: unsigned, inner: signed. */
11454 prec = oprec;
11455 break;
11456 case 7:
11457 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11458 prec = iprec;
11459 break;
11460 default:
11461 gcc_unreachable ();
11462 }
11463
11464 return wide_int_to_tree (outer,
11465 wi::mask (prec, false, TYPE_PRECISION (outer)));
11466 }
11467
11468 /* Returns the smallest value obtainable by casting something in INNER type to
11469 OUTER type. */
11470
11471 tree
11472 lower_bound_in_type (tree outer, tree inner)
11473 {
11474 unsigned oprec = TYPE_PRECISION (outer);
11475 unsigned iprec = TYPE_PRECISION (inner);
11476
11477 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11478 and obtain 0. */
11479 if (TYPE_UNSIGNED (outer)
11480 /* If we are widening something of an unsigned type, OUTER type
11481 contains all values of INNER type. In particular, both INNER
11482 and OUTER types have zero in common. */
11483 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11484 return build_int_cst (outer, 0);
11485 else
11486 {
11487 /* If we are widening a signed type to another signed type, we
11488 want to obtain -2^^(iprec-1). If we are keeping the
11489 precision or narrowing to a signed type, we want to obtain
11490 -2^(oprec-1). */
11491 unsigned prec = oprec > iprec ? iprec : oprec;
11492 return wide_int_to_tree (outer,
11493 wi::mask (prec - 1, true,
11494 TYPE_PRECISION (outer)));
11495 }
11496 }
11497
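/* Illustrative worked example, not part of GCC, for the two functions
   above.  With an 8-bit signed INNER and a 16-bit unsigned OUTER
   (case 6: oprec > iprec, outer unsigned, inner signed), casting -1
   yields 0xffff, so the upper bound uses prec == oprec, and the lower
   bound is 0 because OUTER is unsigned.  With a 16-bit signed OUTER and
   an 8-bit signed INNER (case 4), the bounds are simply those of the
   inner type, [-128, 127].  */
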
11498 /* Return nonzero if two operands that are suitable for PHI nodes are
11499 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11500 SSA_NAME or invariant. Note that this is strictly an optimization.
11501 That is, callers of this function can directly call operand_equal_p
11502 and get the same result, only slower. */
11503
11504 int
11505 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11506 {
11507 if (arg0 == arg1)
11508 return 1;
11509 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11510 return 0;
11511 return operand_equal_p (arg0, arg1, 0);
11512 }
11513
11514 /* Returns the number of zeros at the end of the binary representation of X. */
11515
11516 tree
11517 num_ending_zeros (const_tree x)
11518 {
11519 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11520 }
11521
11522
11523 #define WALK_SUBTREE(NODE) \
11524 do \
11525 { \
11526 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11527 if (result) \
11528 return result; \
11529 } \
11530 while (0)
11531
11532 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11533 to be walked whenever a type is seen in the tree. The rest of the operands
11534 and the return value are as for walk_tree. */
11535
11536 static tree
11537 walk_type_fields (tree type, walk_tree_fn func, void *data,
11538 hash_set<tree> *pset, walk_tree_lh lh)
11539 {
11540 tree result = NULL_TREE;
11541
11542 switch (TREE_CODE (type))
11543 {
11544 case POINTER_TYPE:
11545 case REFERENCE_TYPE:
11546 case VECTOR_TYPE:
11547 /* We have to worry about mutually recursive pointers. These can't
11548 be written in C. They can in Ada. It's pathological, but
11549 there's an ACATS test (c38102a) that checks it. Deal with this
11550 by checking if we're pointing to another pointer, that one
11551 points to another pointer, that one does too, and we have no htab.
11552 If so, get a hash table. We check three levels deep to avoid
11553 the cost of the hash table if we don't need one. */
11554 if (POINTER_TYPE_P (TREE_TYPE (type))
11555 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11556 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11557 && !pset)
11558 {
11559 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11560 func, data);
11561 if (result)
11562 return result;
11563
11564 break;
11565 }
11566
11567 /* ... fall through ... */
11568
11569 case COMPLEX_TYPE:
11570 WALK_SUBTREE (TREE_TYPE (type));
11571 break;
11572
11573 case METHOD_TYPE:
11574 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11575
11576 /* Fall through. */
11577
11578 case FUNCTION_TYPE:
11579 WALK_SUBTREE (TREE_TYPE (type));
11580 {
11581 tree arg;
11582
11583 /* We never want to walk into default arguments. */
11584 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11585 WALK_SUBTREE (TREE_VALUE (arg));
11586 }
11587 break;
11588
11589 case ARRAY_TYPE:
11590 /* Don't follow this node's type if it is a pointer, for fear that
11591 we'll have infinite recursion. If we have a PSET, then we
11592 need not fear. */
11593 if (pset
11594 || (!POINTER_TYPE_P (TREE_TYPE (type))
11595 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11596 WALK_SUBTREE (TREE_TYPE (type));
11597 WALK_SUBTREE (TYPE_DOMAIN (type));
11598 break;
11599
11600 case OFFSET_TYPE:
11601 WALK_SUBTREE (TREE_TYPE (type));
11602 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11603 break;
11604
11605 default:
11606 break;
11607 }
11608
11609 return NULL_TREE;
11610 }
11611
11612 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11613 called with the DATA and the address of each sub-tree. If FUNC returns a
11614 non-NULL value, the traversal is stopped, and the value returned by FUNC
11615 is returned. If PSET is non-NULL it is used to record the nodes visited,
11616 and to avoid visiting a node more than once. */
11617
11618 tree
11619 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11620 hash_set<tree> *pset, walk_tree_lh lh)
11621 {
11622 enum tree_code code;
11623 int walk_subtrees;
11624 tree result;
11625
11626 #define WALK_SUBTREE_TAIL(NODE) \
11627 do \
11628 { \
11629 tp = & (NODE); \
11630 goto tail_recurse; \
11631 } \
11632 while (0)
11633
11634 tail_recurse:
11635 /* Skip empty subtrees. */
11636 if (!*tp)
11637 return NULL_TREE;
11638
11639 /* Don't walk the same tree twice, if the user has requested
11640 that we avoid doing so. */
11641 if (pset && pset->add (*tp))
11642 return NULL_TREE;
11643
11644 /* Call the function. */
11645 walk_subtrees = 1;
11646 result = (*func) (tp, &walk_subtrees, data);
11647
11648 /* If we found something, return it. */
11649 if (result)
11650 return result;
11651
11652 code = TREE_CODE (*tp);
11653
11654 /* Even if we didn't, FUNC may have decided that there was nothing
11655 interesting below this point in the tree. */
11656 if (!walk_subtrees)
11657 {
11658 /* But we still need to check our siblings. */
11659 if (code == TREE_LIST)
11660 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11661 else if (code == OMP_CLAUSE)
11662 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11663 else
11664 return NULL_TREE;
11665 }
11666
11667 if (lh)
11668 {
11669 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11670 if (result || !walk_subtrees)
11671 return result;
11672 }
11673
11674 switch (code)
11675 {
11676 case ERROR_MARK:
11677 case IDENTIFIER_NODE:
11678 case INTEGER_CST:
11679 case REAL_CST:
11680 case FIXED_CST:
11681 case VECTOR_CST:
11682 case STRING_CST:
11683 case BLOCK:
11684 case PLACEHOLDER_EXPR:
11685 case SSA_NAME:
11686 case FIELD_DECL:
11687 case RESULT_DECL:
11688 /* None of these have subtrees other than those already walked
11689 above. */
11690 break;
11691
11692 case TREE_LIST:
11693 WALK_SUBTREE (TREE_VALUE (*tp));
11694 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11695 break;
11696
11697 case TREE_VEC:
11698 {
11699 int len = TREE_VEC_LENGTH (*tp);
11700
11701 if (len == 0)
11702 break;
11703
11704 /* Walk all elements but the first. */
11705 while (--len)
11706 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11707
11708 /* Now walk the first one as a tail call. */
11709 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11710 }
11711
11712 case COMPLEX_CST:
11713 WALK_SUBTREE (TREE_REALPART (*tp));
11714 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11715
11716 case CONSTRUCTOR:
11717 {
11718 unsigned HOST_WIDE_INT idx;
11719 constructor_elt *ce;
11720
11721 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11722 idx++)
11723 WALK_SUBTREE (ce->value);
11724 }
11725 break;
11726
11727 case SAVE_EXPR:
11728 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11729
11730 case BIND_EXPR:
11731 {
11732 tree decl;
11733 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11734 {
11735 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11736 into declarations that are just mentioned, rather than
11737 declared; they don't really belong to this part of the tree.
11738 And, we can see cycles: the initializer for a declaration
11739 can refer to the declaration itself. */
11740 WALK_SUBTREE (DECL_INITIAL (decl));
11741 WALK_SUBTREE (DECL_SIZE (decl));
11742 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11743 }
11744 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11745 }
11746
11747 case STATEMENT_LIST:
11748 {
11749 tree_stmt_iterator i;
11750 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11751 WALK_SUBTREE (*tsi_stmt_ptr (i));
11752 }
11753 break;
11754
11755 case OMP_CLAUSE:
11756 switch (OMP_CLAUSE_CODE (*tp))
11757 {
11758 case OMP_CLAUSE_GANG:
11759 case OMP_CLAUSE__GRIDDIM_:
11760 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11761 /* FALLTHRU */
11762
11763 case OMP_CLAUSE_ASYNC:
11764 case OMP_CLAUSE_WAIT:
11765 case OMP_CLAUSE_WORKER:
11766 case OMP_CLAUSE_VECTOR:
11767 case OMP_CLAUSE_NUM_GANGS:
11768 case OMP_CLAUSE_NUM_WORKERS:
11769 case OMP_CLAUSE_VECTOR_LENGTH:
11770 case OMP_CLAUSE_PRIVATE:
11771 case OMP_CLAUSE_SHARED:
11772 case OMP_CLAUSE_FIRSTPRIVATE:
11773 case OMP_CLAUSE_COPYIN:
11774 case OMP_CLAUSE_COPYPRIVATE:
11775 case OMP_CLAUSE_FINAL:
11776 case OMP_CLAUSE_IF:
11777 case OMP_CLAUSE_NUM_THREADS:
11778 case OMP_CLAUSE_SCHEDULE:
11779 case OMP_CLAUSE_UNIFORM:
11780 case OMP_CLAUSE_DEPEND:
11781 case OMP_CLAUSE_NUM_TEAMS:
11782 case OMP_CLAUSE_THREAD_LIMIT:
11783 case OMP_CLAUSE_DEVICE:
11784 case OMP_CLAUSE_DIST_SCHEDULE:
11785 case OMP_CLAUSE_SAFELEN:
11786 case OMP_CLAUSE_SIMDLEN:
11787 case OMP_CLAUSE_ORDERED:
11788 case OMP_CLAUSE_PRIORITY:
11789 case OMP_CLAUSE_GRAINSIZE:
11790 case OMP_CLAUSE_NUM_TASKS:
11791 case OMP_CLAUSE_HINT:
11792 case OMP_CLAUSE_TO_DECLARE:
11793 case OMP_CLAUSE_LINK:
11794 case OMP_CLAUSE_USE_DEVICE_PTR:
11795 case OMP_CLAUSE_IS_DEVICE_PTR:
11796 case OMP_CLAUSE__LOOPTEMP_:
11797 case OMP_CLAUSE__SIMDUID_:
11798 case OMP_CLAUSE__CILK_FOR_COUNT_:
11799 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11800 /* FALLTHRU */
11801
11802 case OMP_CLAUSE_INDEPENDENT:
11803 case OMP_CLAUSE_NOWAIT:
11804 case OMP_CLAUSE_DEFAULT:
11805 case OMP_CLAUSE_UNTIED:
11806 case OMP_CLAUSE_MERGEABLE:
11807 case OMP_CLAUSE_PROC_BIND:
11808 case OMP_CLAUSE_INBRANCH:
11809 case OMP_CLAUSE_NOTINBRANCH:
11810 case OMP_CLAUSE_FOR:
11811 case OMP_CLAUSE_PARALLEL:
11812 case OMP_CLAUSE_SECTIONS:
11813 case OMP_CLAUSE_TASKGROUP:
11814 case OMP_CLAUSE_NOGROUP:
11815 case OMP_CLAUSE_THREADS:
11816 case OMP_CLAUSE_SIMD:
11817 case OMP_CLAUSE_DEFAULTMAP:
11818 case OMP_CLAUSE_AUTO:
11819 case OMP_CLAUSE_SEQ:
11820 case OMP_CLAUSE_TILE:
11821 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11822
11823 case OMP_CLAUSE_LASTPRIVATE:
11824 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11825 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11826 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11827
11828 case OMP_CLAUSE_COLLAPSE:
11829 {
11830 int i;
11831 for (i = 0; i < 3; i++)
11832 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11833 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11834 }
11835
11836 case OMP_CLAUSE_LINEAR:
11837 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11838 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11839 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11840 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11841
11842 case OMP_CLAUSE_ALIGNED:
11843 case OMP_CLAUSE_FROM:
11844 case OMP_CLAUSE_TO:
11845 case OMP_CLAUSE_MAP:
11846 case OMP_CLAUSE__CACHE_:
11847 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11848 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11849 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11850
11851 case OMP_CLAUSE_REDUCTION:
11852 {
11853 int i;
11854 for (i = 0; i < 5; i++)
11855 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11856 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11857 }
11858
11859 default:
11860 gcc_unreachable ();
11861 }
11862 break;
11863
11864 case TARGET_EXPR:
11865 {
11866 int i, len;
11867
11868 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11869 But we only want to walk it once. */
11870 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11871 for (i = 0; i < len; ++i)
11872 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11873 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11874 }
11875
11876 case DECL_EXPR:
11877 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11878 defining. We only want to walk into these fields of a type in this
11879 case and not in the general case of a mere reference to the type.
11880
11881 The criterion is as follows: if the field can be an expression, it
11882 must be walked only here. This should be in keeping with the fields
11883 that are directly gimplified in gimplify_type_sizes in order for the
11884 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11885 variable-sized types.
11886
11887 Note that DECLs get walked as part of processing the BIND_EXPR. */
11888 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11889 {
11890 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11891 if (TREE_CODE (*type_p) == ERROR_MARK)
11892 return NULL_TREE;
11893
11894 /* Call the function for the type. See if it returns anything or
11895 doesn't want us to continue. If we are to continue, walk both
11896 the normal fields and those for the declaration case. */
11897 result = (*func) (type_p, &walk_subtrees, data);
11898 if (result || !walk_subtrees)
11899 return result;
11900
11901 /* But do not walk a pointed-to type since it may itself need to
11902 be walked in the declaration case if it isn't anonymous. */
11903 if (!POINTER_TYPE_P (*type_p))
11904 {
11905 result = walk_type_fields (*type_p, func, data, pset, lh);
11906 if (result)
11907 return result;
11908 }
11909
11910 /* If this is a record type, also walk the fields. */
11911 if (RECORD_OR_UNION_TYPE_P (*type_p))
11912 {
11913 tree field;
11914
11915 for (field = TYPE_FIELDS (*type_p); field;
11916 field = DECL_CHAIN (field))
11917 {
11918 /* We'd like to look at the type of the field, but we can
11919 easily get infinite recursion. So assume it's pointed
11920 to elsewhere in the tree. Also, ignore things that
11921 aren't fields. */
11922 if (TREE_CODE (field) != FIELD_DECL)
11923 continue;
11924
11925 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11926 WALK_SUBTREE (DECL_SIZE (field));
11927 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11928 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11929 WALK_SUBTREE (DECL_QUALIFIER (field));
11930 }
11931 }
11932
11933 /* Same for scalar types. */
11934 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11935 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11936 || TREE_CODE (*type_p) == INTEGER_TYPE
11937 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11938 || TREE_CODE (*type_p) == REAL_TYPE)
11939 {
11940 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11941 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11942 }
11943
11944 WALK_SUBTREE (TYPE_SIZE (*type_p));
11945 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11946 }
11947 /* FALLTHRU */
11948
11949 default:
11950 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11951 {
11952 int i, len;
11953
11954 /* Walk over all the sub-trees of this operand. */
11955 len = TREE_OPERAND_LENGTH (*tp);
11956
11957 /* Go through the subtrees. We need to do this in forward order so
11958 that the scope of a FOR_EXPR is handled properly. */
11959 if (len)
11960 {
11961 for (i = 0; i < len - 1; ++i)
11962 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11963 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11964 }
11965 }
11966 /* If this is a type, walk the needed fields in the type. */
11967 else if (TYPE_P (*tp))
11968 return walk_type_fields (*tp, func, data, pset, lh);
11969 break;
11970 }
11971
11972 /* We didn't find what we were looking for. */
11973 return NULL_TREE;
11974
11975 #undef WALK_SUBTREE_TAIL
11976 }
11977 #undef WALK_SUBTREE
11978
11979 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11980
11981 tree
11982 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11983 walk_tree_lh lh)
11984 {
11985 tree result;
11986
11987 hash_set<tree> pset;
11988 result = walk_tree_1 (tp, func, data, &pset, lh);
11989 return result;
11990 }
11991
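/* Illustrative sketch, not part of GCC: a typical walk_tree callback.
   It counts INTEGER_CST nodes reachable from EXPR and demonstrates the
   *WALK_SUBTREES protocol; clearing it tells walk_tree_1 not to descend
   into the current node's operands.  */

static tree
example_count_int_csts_r (tree *tp, int *walk_subtrees, void *data)
{
  unsigned *count = (unsigned *) data;
  if (TREE_CODE (*tp) == INTEGER_CST)
    {
      ++*count;
      *walk_subtrees = 0;	/* Constants have no interesting operands.  */
    }
  return NULL_TREE;		/* Returning non-NULL would stop the walk.  */
}

static unsigned
example_count_int_csts (tree expr)
{
  unsigned count = 0;
  /* walk_tree is the tree.h wrapper that passes a NULL language hook.  */
  walk_tree (&expr, example_count_int_csts_r, &count, NULL);
  return count;
}
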
11992
11993 tree
11994 tree_block (tree t)
11995 {
11996 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11997
11998 if (IS_EXPR_CODE_CLASS (c))
11999 return LOCATION_BLOCK (t->exp.locus);
12000 gcc_unreachable ();
12001 return NULL;
12002 }
12003
12004 void
12005 tree_set_block (tree t, tree b)
12006 {
12007 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12008
12009 if (IS_EXPR_CODE_CLASS (c))
12010 {
12011 t->exp.locus = set_block (t->exp.locus, b);
12012 }
12013 else
12014 gcc_unreachable ();
12015 }
12016
12017 /* Create a nameless artificial label and put it in the current
12018 function context. The label has a location of LOC. Returns the
12019 newly created label. */
12020
12021 tree
12022 create_artificial_label (location_t loc)
12023 {
12024 tree lab = build_decl (loc,
12025 LABEL_DECL, NULL_TREE, void_type_node);
12026
12027 DECL_ARTIFICIAL (lab) = 1;
12028 DECL_IGNORED_P (lab) = 1;
12029 DECL_CONTEXT (lab) = current_function_decl;
12030 return lab;
12031 }
12032
12033 /* Given a tree, try to return a useful variable name that we can use
12034 to prefix a temporary that is being assigned the value of the tree.
12035 E.g. given <temp> = &A, return A. */
12036
12037 const char *
12038 get_name (tree t)
12039 {
12040 tree stripped_decl;
12041
12042 stripped_decl = t;
12043 STRIP_NOPS (stripped_decl);
12044 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12045 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12046 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12047 {
12048 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12049 if (!name)
12050 return NULL;
12051 return IDENTIFIER_POINTER (name);
12052 }
12053 else
12054 {
12055 switch (TREE_CODE (stripped_decl))
12056 {
12057 case ADDR_EXPR:
12058 return get_name (TREE_OPERAND (stripped_decl, 0));
12059 default:
12060 return NULL;
12061 }
12062 }
12063 }
12064
12065 /* Return true if the function type FNTYPE has a variable argument list. */
12066
12067 bool
12068 stdarg_p (const_tree fntype)
12069 {
12070 function_args_iterator args_iter;
12071 tree n = NULL_TREE, t;
12072
12073 if (!fntype)
12074 return false;
12075
12076 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12077 {
12078 n = t;
12079 }
12080
12081 return n != NULL_TREE && n != void_type_node;
12082 }
12083
12084 /* Return true if the function type FNTYPE has a prototype. */
12085
12086 bool
12087 prototype_p (const_tree fntype)
12088 {
12089 tree t;
12090
12091 gcc_assert (fntype != NULL_TREE);
12092
12093 t = TYPE_ARG_TYPES (fntype);
12094 return (t != NULL_TREE);
12095 }
12096
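/* Illustrative sketch, not part of GCC: exercising the two predicates
   above on a freshly built variadic type equivalent to "int f (int, ...)".
   For an unprototyped (K&R-style) type, whose TYPE_ARG_TYPES is NULL,
   prototype_p would return false instead.  */

static void
example_check_variadic (void)
{
  tree vtype = build_varargs_function_type_list (integer_type_node,
						 integer_type_node,
						 NULL_TREE);
  gcc_checking_assert (stdarg_p (vtype) && prototype_p (vtype));
}
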
12097 /* If BLOCK is inlined from an __attribute__((__artificial__))
12098 routine, return a pointer to the location from which it was
12099 called. */
12100 location_t *
12101 block_nonartificial_location (tree block)
12102 {
12103 location_t *ret = NULL;
12104
12105 while (block && TREE_CODE (block) == BLOCK
12106 && BLOCK_ABSTRACT_ORIGIN (block))
12107 {
12108 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12109
12110 while (TREE_CODE (ao) == BLOCK
12111 && BLOCK_ABSTRACT_ORIGIN (ao)
12112 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12113 ao = BLOCK_ABSTRACT_ORIGIN (ao);
12114
12115 if (TREE_CODE (ao) == FUNCTION_DECL)
12116 {
12117 /* If AO is an artificial inline, point RET to the
12118 call site locus at which it has been inlined and continue
12119 the loop, in case AO's caller is also an artificial
12120 inline. */
12121 if (DECL_DECLARED_INLINE_P (ao)
12122 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12123 ret = &BLOCK_SOURCE_LOCATION (block);
12124 else
12125 break;
12126 }
12127 else if (TREE_CODE (ao) != BLOCK)
12128 break;
12129
12130 block = BLOCK_SUPERCONTEXT (block);
12131 }
12132 return ret;
12133 }
12134
12135
12136 /* If EXP is inlined from an __attribute__((__artificial__))
12137 function, return the location of the original call expression. */
12138
12139 location_t
12140 tree_nonartificial_location (tree exp)
12141 {
12142 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12143
12144 if (loc)
12145 return *loc;
12146 else
12147 return EXPR_LOCATION (exp);
12148 }
12149
12150
12151 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12152 and TARGET_OPTION_NODE nodes. */
12153
12154 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12155
12156 hashval_t
12157 cl_option_hasher::hash (tree x)
12158 {
12159 const_tree const t = x;
12160 const char *p;
12161 size_t i;
12162 size_t len = 0;
12163 hashval_t hash = 0;
12164
12165 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12166 {
12167 p = (const char *)TREE_OPTIMIZATION (t);
12168 len = sizeof (struct cl_optimization);
12169 }
12170
12171 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12172 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12173
12174 else
12175 gcc_unreachable ();
12176
12177 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12178 something else. */
12179 for (i = 0; i < len; i++)
12180 if (p[i])
12181 hash = (hash << 4) ^ ((i << 2) | p[i]);
12182
12183 return hash;
12184 }
12185
12186 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12187 TARGET_OPTION tree node) is the same as that given by *Y, which is a
12188 node of the same kind. */
12189
12190 bool
12191 cl_option_hasher::equal (tree x, tree y)
12192 {
12193 const_tree const xt = x;
12194 const_tree const yt = y;
12195 const char *xp;
12196 const char *yp;
12197 size_t len;
12198
12199 if (TREE_CODE (xt) != TREE_CODE (yt))
12200 return 0;
12201
12202 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12203 {
12204 xp = (const char *)TREE_OPTIMIZATION (xt);
12205 yp = (const char *)TREE_OPTIMIZATION (yt);
12206 len = sizeof (struct cl_optimization);
12207 }
12208
12209 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12210 {
12211 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12212 TREE_TARGET_OPTION (yt));
12213 }
12214
12215 else
12216 gcc_unreachable ();
12217
12218 return (memcmp (xp, yp, len) == 0);
12219 }
12220
12221 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12222
12223 tree
12224 build_optimization_node (struct gcc_options *opts)
12225 {
12226 tree t;
12227
12228 /* Use the cache of optimization nodes. */
12229
12230 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12231 opts);
12232
12233 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12234 t = *slot;
12235 if (!t)
12236 {
12237 /* Insert this one into the hash table. */
12238 t = cl_optimization_node;
12239 *slot = t;
12240
12241 /* Make a new node for next time round. */
12242 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12243 }
12244
12245 return t;
12246 }
12247
12248 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12249
12250 tree
12251 build_target_option_node (struct gcc_options *opts)
12252 {
12253 tree t;
12254
12255 /* Use the cache of optimization nodes. */
12256
12257 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12258 opts);
12259
12260 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12261 t = *slot;
12262 if (!t)
12263 {
12264 /* Insert this one into the hash table. */
12265 t = cl_target_option_node;
12266 *slot = t;
12267
12268 /* Make a new node for next time round. */
12269 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12270 }
12271
12272 return t;
12273 }
12274
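/* Illustrative sketch, not part of GCC: the usual way the two caches
   above are consumed.  Because equal option sets share a single node,
   the optimize/target settings recorded on two function decls can later
   be compared by pointer identity.  FNDECL is assumed to be a
   FUNCTION_DECL supplied by the caller.  */

static void
example_record_current_options (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
}
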
12275 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12276 so that they aren't saved during PCH writing. */
12277
12278 void
12279 prepare_target_option_nodes_for_pch (void)
12280 {
12281 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12282 for (; iter != cl_option_hash_table->end (); ++iter)
12283 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12284 TREE_TARGET_GLOBALS (*iter) = NULL;
12285 }
12286
12287 /* Determine the "ultimate origin" of a block. The block may be an inlined
12288 instance of an inlined instance of a block which is local to an inline
12289 function, so we have to trace all the way back through the origin chain
12290 to find out what sort of node actually served as the original seed for the
12291 given block. */
12292
12293 tree
12294 block_ultimate_origin (const_tree block)
12295 {
12296 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12297
12298 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12299 we're trying to output the abstract instance of this function. */
12300 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12301 return NULL_TREE;
12302
12303 if (immediate_origin == NULL_TREE)
12304 return NULL_TREE;
12305 else
12306 {
12307 tree ret_val;
12308 tree lookahead = immediate_origin;
12309
12310 do
12311 {
12312 ret_val = lookahead;
12313 lookahead = (TREE_CODE (ret_val) == BLOCK
12314 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12315 }
12316 while (lookahead != NULL && lookahead != ret_val);
12317
12318 /* The block's abstract origin chain may not be the *ultimate* origin of
12319 the block. It could lead to a DECL that has an abstract origin set.
12320 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12321 will give us if it has one). Note that DECL's abstract origins are
12322 supposed to be the most distant ancestor (or so decl_ultimate_origin
12323 claims), so we don't need to loop following the DECL origins. */
12324 if (DECL_P (ret_val))
12325 return DECL_ORIGIN (ret_val);
12326
12327 return ret_val;
12328 }
12329 }
12330
12331 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12332 no instruction. */
12333
12334 bool
12335 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12336 {
12337 /* Do not strip casts into or out of differing address spaces. */
12338 if (POINTER_TYPE_P (outer_type)
12339 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12340 {
12341 if (!POINTER_TYPE_P (inner_type)
12342 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12343 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12344 return false;
12345 }
12346 else if (POINTER_TYPE_P (inner_type)
12347 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12348 {
12349 /* We already know that outer_type is not a pointer with
12350 a non-generic address space. */
12351 return false;
12352 }
12353
12354 /* Use precision rather than machine mode when we can, which gives
12355 the correct answer even for submode (bit-field) types. */
12356 if ((INTEGRAL_TYPE_P (outer_type)
12357 || POINTER_TYPE_P (outer_type)
12358 || TREE_CODE (outer_type) == OFFSET_TYPE)
12359 && (INTEGRAL_TYPE_P (inner_type)
12360 || POINTER_TYPE_P (inner_type)
12361 || TREE_CODE (inner_type) == OFFSET_TYPE))
12362 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12363
12364 /* Otherwise fall back on comparing machine modes (e.g. for
12365 aggregate types, floats). */
12366 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12367 }
12368
12369 /* Return true iff conversion in EXP generates no instruction. Mark
12370 it inline so that we fully inline into the stripping functions even
12371 though we have two uses of this function. */
12372
12373 static inline bool
12374 tree_nop_conversion (const_tree exp)
12375 {
12376 tree outer_type, inner_type;
12377
12378 if (!CONVERT_EXPR_P (exp)
12379 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12380 return false;
12381 if (TREE_OPERAND (exp, 0) == error_mark_node)
12382 return false;
12383
12384 outer_type = TREE_TYPE (exp);
12385 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12386
12387 if (!inner_type)
12388 return false;
12389
12390 return tree_nop_conversion_p (outer_type, inner_type);
12391 }
12392
12393 /* Return true iff conversion in EXP generates no instruction. Don't
12394 consider conversions changing the signedness. */
12395
12396 static bool
12397 tree_sign_nop_conversion (const_tree exp)
12398 {
12399 tree outer_type, inner_type;
12400
12401 if (!tree_nop_conversion (exp))
12402 return false;
12403
12404 outer_type = TREE_TYPE (exp);
12405 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12406
12407 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12408 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12409 }
12410
12411 /* Strip conversions from EXP according to tree_nop_conversion and
12412 return the resulting expression. */
12413
12414 tree
12415 tree_strip_nop_conversions (tree exp)
12416 {
12417 while (tree_nop_conversion (exp))
12418 exp = TREE_OPERAND (exp, 0);
12419 return exp;
12420 }
12421
12422 /* Strip conversions from EXP according to tree_sign_nop_conversion
12423 and return the resulting expression. */
12424
12425 tree
12426 tree_strip_sign_nop_conversions (tree exp)
12427 {
12428 while (tree_sign_nop_conversion (exp))
12429 exp = TREE_OPERAND (exp, 0);
12430 return exp;
12431 }
12432
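/* Illustrative sketch, not part of GCC: a common use of the strippers
   above, comparing two expressions while ignoring conversions that
   generate no code, such as casts between signed and unsigned integer
   types of equal precision.  */

static bool
example_equal_ignoring_nops (tree a, tree b)
{
  a = tree_strip_nop_conversions (a);
  b = tree_strip_nop_conversions (b);
  return operand_equal_p (a, b, 0);
}
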
12433 /* Avoid any floating point extensions from EXP. */
12434 tree
12435 strip_float_extensions (tree exp)
12436 {
12437 tree sub, expt, subt;
12438
12439 /* For a floating point constant, look up the narrowest type that can hold
12440 it properly and handle it like (type)(narrowest_type)constant.
12441 This way we can optimize, for instance, a=a*2.0 where "a" is float
12442 but 2.0 is a double constant. */
12443 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12444 {
12445 REAL_VALUE_TYPE orig;
12446 tree type = NULL;
12447
12448 orig = TREE_REAL_CST (exp);
12449 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12450 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12451 type = float_type_node;
12452 else if (TYPE_PRECISION (TREE_TYPE (exp))
12453 > TYPE_PRECISION (double_type_node)
12454 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12455 type = double_type_node;
12456 if (type)
12457 return build_real_truncate (type, orig);
12458 }
12459
12460 if (!CONVERT_EXPR_P (exp))
12461 return exp;
12462
12463 sub = TREE_OPERAND (exp, 0);
12464 subt = TREE_TYPE (sub);
12465 expt = TREE_TYPE (exp);
12466
12467 if (!FLOAT_TYPE_P (subt))
12468 return exp;
12469
12470 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12471 return exp;
12472
12473 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12474 return exp;
12475
12476 return strip_float_extensions (sub);
12477 }
12478
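/* Illustrative sketch, not part of GCC: with float F, strip_float_extensions
   reduces (double) F back to F, and reduces the double constant 2.0 to a
   float REAL_CST because it is exactly representable; checking that both
   stripped operands have float mode is what would let a = a * 2.0 be
   evaluated entirely in single precision.  */

static bool
example_mult_can_stay_single (tree lhs, tree rhs)
{
  tree l = strip_float_extensions (lhs);
  tree r = strip_float_extensions (rhs);
  return (TYPE_MODE (TREE_TYPE (l)) == TYPE_MODE (float_type_node)
	  && TYPE_MODE (TREE_TYPE (r)) == TYPE_MODE (float_type_node));
}
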
12479 /* Strip out all handled components that produce invariant
12480 offsets. */
12481
12482 const_tree
12483 strip_invariant_refs (const_tree op)
12484 {
12485 while (handled_component_p (op))
12486 {
12487 switch (TREE_CODE (op))
12488 {
12489 case ARRAY_REF:
12490 case ARRAY_RANGE_REF:
12491 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12492 || TREE_OPERAND (op, 2) != NULL_TREE
12493 || TREE_OPERAND (op, 3) != NULL_TREE)
12494 return NULL;
12495 break;
12496
12497 case COMPONENT_REF:
12498 if (TREE_OPERAND (op, 2) != NULL_TREE)
12499 return NULL;
12500 break;
12501
12502 default:;
12503 }
12504 op = TREE_OPERAND (op, 0);
12505 }
12506
12507 return op;
12508 }
12509
12510 static GTY(()) tree gcc_eh_personality_decl;
12511
12512 /* Return the GCC personality function decl. */
12513
12514 tree
12515 lhd_gcc_personality (void)
12516 {
12517 if (!gcc_eh_personality_decl)
12518 gcc_eh_personality_decl = build_personality_function ("gcc");
12519 return gcc_eh_personality_decl;
12520 }
12521
12522 /* TARGET is the call target of a GIMPLE call statement
12523 (obtained by gimple_call_fn). Return true if it is
12524 an OBJ_TYPE_REF representing a virtual call of a C++ method,
12525 as opposed to an OBJ_TYPE_REF representing ObjC calls
12526 through a cast, where the middle-end devirtualization machinery
12527 can't apply. */
12528
12529 bool
12530 virtual_method_call_p (const_tree target)
12531 {
12532 if (TREE_CODE (target) != OBJ_TYPE_REF)
12533 return false;
12534 tree t = TREE_TYPE (target);
12535 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12536 t = TREE_TYPE (t);
12537 if (TREE_CODE (t) == FUNCTION_TYPE)
12538 return false;
12539 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12540 /* If we do not have BINFO associated, it means that type was built
12541 without devirtualization enabled. Do not consider this a virtual
12542 call. */
12543 if (!TYPE_BINFO (obj_type_ref_class (target)))
12544 return false;
12545 return true;
12546 }
12547
12548 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
12549
12550 tree
12551 obj_type_ref_class (const_tree ref)
12552 {
12553 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12554 ref = TREE_TYPE (ref);
12555 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12556 ref = TREE_TYPE (ref);
12557 /* We look for the type that THIS points to. ObjC also builds
12558 OBJ_TYPE_REF with non-method calls; their first parameter
12559 ID, however, also corresponds to the class type. */
12560 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12561 || TREE_CODE (ref) == FUNCTION_TYPE);
12562 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12563 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12564 return TREE_TYPE (ref);
12565 }
12566
12567 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12568
12569 static tree
12570 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12571 {
12572 unsigned int i;
12573 tree base_binfo, b;
12574
12575 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12576 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12577 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12578 return base_binfo;
12579 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12580 return b;
12581 return NULL;
12582 }
12583
12584 /* Try to find a base info of BINFO that would have its field decl at offset
12585 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12586 found, return it; otherwise return NULL_TREE. */
12587
12588 tree
12589 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12590 {
12591 tree type = BINFO_TYPE (binfo);
12592
12593 while (true)
12594 {
12595 HOST_WIDE_INT pos, size;
12596 tree fld;
12597 int i;
12598
12599 if (types_same_for_odr (type, expected_type))
12600 return binfo;
12601 if (offset < 0)
12602 return NULL_TREE;
12603
12604 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12605 {
12606 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12607 continue;
12608
12609 pos = int_bit_position (fld);
12610 size = tree_to_uhwi (DECL_SIZE (fld));
12611 if (pos <= offset && (pos + size) > offset)
12612 break;
12613 }
12614 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12615 return NULL_TREE;
12616
12617 /* Offset 0 indicates the primary base, whose vtable contents are
12618 represented in the binfo for the derived class. */
12619 else if (offset != 0)
12620 {
12621 tree found_binfo = NULL, base_binfo;
12622 /* Offsets in BINFO are in bytes relative to the whole structure
12623 while POS is in bits relative to the containing field. */
12624 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12625 / BITS_PER_UNIT);
12626
12627 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12628 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12629 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12630 {
12631 found_binfo = base_binfo;
12632 break;
12633 }
12634 if (found_binfo)
12635 binfo = found_binfo;
12636 else
12637 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12638 binfo_offset);
12639 }
12640
12641 type = TREE_TYPE (fld);
12642 offset -= pos;
12643 }
12644 }
12645
12646 /* Returns true if X is a typedef decl. */
12647
12648 bool
12649 is_typedef_decl (const_tree x)
12650 {
12651 return (x && TREE_CODE (x) == TYPE_DECL
12652 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12653 }
12654
12655 /* Returns true iff TYPE is a type variant created for a typedef. */
12656
12657 bool
12658 typedef_variant_p (const_tree type)
12659 {
12660 return is_typedef_decl (TYPE_NAME (type));
12661 }
12662
12663 /* Warn about a use of an identifier which was marked deprecated. */
12664 void
12665 warn_deprecated_use (tree node, tree attr)
12666 {
12667 const char *msg;
12668
12669 if (node == 0 || !warn_deprecated_decl)
12670 return;
12671
12672 if (!attr)
12673 {
12674 if (DECL_P (node))
12675 attr = DECL_ATTRIBUTES (node);
12676 else if (TYPE_P (node))
12677 {
12678 tree decl = TYPE_STUB_DECL (node);
12679 if (decl)
12680 attr = lookup_attribute ("deprecated",
12681 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12682 }
12683 }
12684
12685 if (attr)
12686 attr = lookup_attribute ("deprecated", attr);
12687
12688 if (attr)
12689 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12690 else
12691 msg = NULL;
12692
12693 bool w;
12694 if (DECL_P (node))
12695 {
12696 if (msg)
12697 w = warning (OPT_Wdeprecated_declarations,
12698 "%qD is deprecated: %s", node, msg);
12699 else
12700 w = warning (OPT_Wdeprecated_declarations,
12701 "%qD is deprecated", node);
12702 if (w)
12703 inform (DECL_SOURCE_LOCATION (node), "declared here");
12704 }
12705 else if (TYPE_P (node))
12706 {
12707 tree what = NULL_TREE;
12708 tree decl = TYPE_STUB_DECL (node);
12709
12710 if (TYPE_NAME (node))
12711 {
12712 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12713 what = TYPE_NAME (node);
12714 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12715 && DECL_NAME (TYPE_NAME (node)))
12716 what = DECL_NAME (TYPE_NAME (node));
12717 }
12718
12719 if (decl)
12720 {
12721 if (what)
12722 {
12723 if (msg)
12724 w = warning (OPT_Wdeprecated_declarations,
12725 "%qE is deprecated: %s", what, msg);
12726 else
12727 w = warning (OPT_Wdeprecated_declarations,
12728 "%qE is deprecated", what);
12729 }
12730 else
12731 {
12732 if (msg)
12733 w = warning (OPT_Wdeprecated_declarations,
12734 "type is deprecated: %s", msg);
12735 else
12736 w = warning (OPT_Wdeprecated_declarations,
12737 "type is deprecated");
12738 }
12739 if (w)
12740 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12741 }
12742 else
12743 {
12744 if (what)
12745 {
12746 if (msg)
12747 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12748 what, msg);
12749 else
12750 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12751 }
12752 else
12753 {
12754 if (msg)
12755 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12756 msg);
12757 else
12758 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12759 }
12760 }
12761 }
12762 }
12763
12764 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12765 somewhere in it. */
12766
12767 bool
12768 contains_bitfld_component_ref_p (const_tree ref)
12769 {
12770 while (handled_component_p (ref))
12771 {
12772 if (TREE_CODE (ref) == COMPONENT_REF
12773 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12774 return true;
12775 ref = TREE_OPERAND (ref, 0);
12776 }
12777
12778 return false;
12779 }
12780
12781 /* Try to determine whether a TRY_CATCH expression can fall through.
12782 This is a subroutine of block_may_fallthru. */
12783
12784 static bool
12785 try_catch_may_fallthru (const_tree stmt)
12786 {
12787 tree_stmt_iterator i;
12788
12789 /* If the TRY block can fall through, the whole TRY_CATCH can
12790 fall through. */
12791 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12792 return true;
12793
12794 i = tsi_start (TREE_OPERAND (stmt, 1));
12795 switch (TREE_CODE (tsi_stmt (i)))
12796 {
12797 case CATCH_EXPR:
12798 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12799 catch expression and a body. The whole TRY_CATCH may fall
12800 through iff any of the catch bodies falls through. */
12801 for (; !tsi_end_p (i); tsi_next (&i))
12802 {
12803 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12804 return true;
12805 }
12806 return false;
12807
12808 case EH_FILTER_EXPR:
12809 /* The exception filter expression only matters if there is an
12810 exception. If the exception does not match EH_FILTER_TYPES,
12811 we will execute EH_FILTER_FAILURE, and we will fall through
12812 if that falls through. If the exception does match
12813 EH_FILTER_TYPES, the stack unwinder will continue up the
12814 stack, so we will not fall through. We don't know whether we
12815 will throw an exception which matches EH_FILTER_TYPES or not,
12816 so we just ignore EH_FILTER_TYPES and assume that we might
12817 throw an exception which doesn't match. */
12818 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12819
12820 default:
12821 /* This case represents statements to be executed when an
12822 exception occurs. Those statements are implicitly followed
12823 by a RESX statement to resume execution after the exception.
12824 So in this case the TRY_CATCH never falls through. */
12825 return false;
12826 }
12827 }
12828
12829 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12830 need not be 100% accurate; simply be conservative and return true if we
12831 don't know. This is used only to avoid stupidly generating extra code.
12832 If we're wrong, we'll just delete the extra code later. */
12833
12834 bool
12835 block_may_fallthru (const_tree block)
12836 {
12837 /* This CONST_CAST is okay because expr_last returns its argument
12838 unmodified and we assign it to a const_tree. */
12839 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12840
12841 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12842 {
12843 case GOTO_EXPR:
12844 case RETURN_EXPR:
12845 /* Easy cases. If the last statement of the block implies
12846 control transfer, then we can't fall through. */
12847 return false;
12848
12849 case SWITCH_EXPR:
12850 /* If SWITCH_LABELS is set, this is lowered, and represents a
12851 branch to a selected label and hence cannot fall through.
12852 Otherwise SWITCH_BODY is set, and the switch can fall
12853 through. */
12854 return SWITCH_LABELS (stmt) == NULL_TREE;
12855
12856 case COND_EXPR:
12857 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12858 return true;
12859 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12860
12861 case BIND_EXPR:
12862 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12863
12864 case TRY_CATCH_EXPR:
12865 return try_catch_may_fallthru (stmt);
12866
12867 case TRY_FINALLY_EXPR:
12868 /* The finally clause is always executed after the try clause,
12869 so if it does not fall through, then the try-finally will not
12870 fall through. Otherwise, if the try clause does not fall
12871 through, then when the finally clause falls through it will
12872 resume execution wherever the try clause was going. So the
12873 whole try-finally will only fall through if both the try
12874 clause and the finally clause fall through. */
12875 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12876 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12877
12878 case MODIFY_EXPR:
12879 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12880 stmt = TREE_OPERAND (stmt, 1);
12881 else
12882 return true;
12883 /* FALLTHRU */
12884
12885 case CALL_EXPR:
12886 /* Functions that do not return do not fall through. */
12887 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12888
12889 case CLEANUP_POINT_EXPR:
12890 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12891
12892 case TARGET_EXPR:
12893 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12894
12895 case ERROR_MARK:
12896 return true;
12897
12898 default:
12899 return lang_hooks.block_may_fallthru (stmt);
12900 }
12901 }
12902
12903 /* True if we are using EH to handle cleanups. */
12904 static bool using_eh_for_cleanups_flag = false;
12905
12906 /* This routine is called from front ends to indicate eh should be used for
12907 cleanups. */
12908 void
12909 using_eh_for_cleanups (void)
12910 {
12911 using_eh_for_cleanups_flag = true;
12912 }
12913
12914 /* Query whether EH is used for cleanups. */
12915 bool
12916 using_eh_for_cleanups_p (void)
12917 {
12918 return using_eh_for_cleanups_flag;
12919 }
12920
12921 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12922 const char *
12923 get_tree_code_name (enum tree_code code)
12924 {
12925 const char *invalid = "<invalid tree code>";
12926
12927 if (code >= MAX_TREE_CODES)
12928 return invalid;
12929
12930 return tree_code_name[code];
12931 }
12932
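/* Illustrative sketch, not part of GCC: get_tree_code_name is the safe
   spelling for diagnostics and dump files; it returns, e.g.,
   "integer_cst" for INTEGER_CST and the "<invalid tree code>" marker for
   out-of-range values instead of indexing past tree_code_name.  */

static void
example_dump_code (FILE *file, tree t)
{
  fprintf (file, "node code: %s\n", get_tree_code_name (TREE_CODE (t)));
}
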
12933 /* Drops the TREE_OVERFLOW flag from T. */
12934
12935 tree
12936 drop_tree_overflow (tree t)
12937 {
12938 gcc_checking_assert (TREE_OVERFLOW (t));
12939
12940 /* For tree codes with a sharing machinery re-build the result. */
12941 if (TREE_CODE (t) == INTEGER_CST)
12942 return wide_int_to_tree (TREE_TYPE (t), t);
12943
12944 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12945 and drop the flag. */
12946 t = copy_node (t);
12947 TREE_OVERFLOW (t) = 0;
12948 return t;
12949 }
12950
12951 /* Given a memory reference expression T, return its base address.
12952 The base address of a memory reference expression is the main
12953 object being referenced. For instance, the base address for
12954 'array[i].fld[j]' is 'array'. You can think of this as stripping
12955 away the offset part from a memory address.
12956
12957 This function calls handled_component_p to strip away all the inner
12958 parts of the memory reference until it reaches the base object. */
12959
12960 tree
12961 get_base_address (tree t)
12962 {
12963 while (handled_component_p (t))
12964 t = TREE_OPERAND (t, 0);
12965
12966 if ((TREE_CODE (t) == MEM_REF
12967 || TREE_CODE (t) == TARGET_MEM_REF)
12968 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12969 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12970
12971 /* ??? Either the alias oracle or all callers need to properly deal
12972 with WITH_SIZE_EXPRs before we can look through those. */
12973 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12974 return NULL_TREE;
12975
12976 return t;
12977 }
12978
12979 /* Return a tree of sizetype representing the size, in bytes, of the element
12980 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12981
12982 tree
12983 array_ref_element_size (tree exp)
12984 {
12985 tree aligned_size = TREE_OPERAND (exp, 3);
12986 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12987 location_t loc = EXPR_LOCATION (exp);
12988
12989 /* If a size was specified in the ARRAY_REF, it's the size measured
12990 in alignment units of the element type. So multiply by that value. */
12991 if (aligned_size)
12992 {
12993 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12994 sizetype from another type of the same width and signedness. */
12995 if (TREE_TYPE (aligned_size) != sizetype)
12996 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12997 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12998 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12999 }
13000
13001 /* Otherwise, take the size from that of the element type. Substitute
13002 any PLACEHOLDER_EXPR that we have. */
13003 else
13004 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13005 }
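/* Illustrative sketch: for 'a[i]' with 'a' of type 'int[10]' and no
   operand 3, this returns TYPE_SIZE_UNIT of the element type, i.e. a
   sizetype constant of 4 on targets with 32-bit int.  */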
13006
13007 /* Return a tree representing the lower bound of the array mentioned in
13008 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13009
13010 tree
13011 array_ref_low_bound (tree exp)
13012 {
13013 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13014
13015 /* If a lower bound is specified in EXP, use it. */
13016 if (TREE_OPERAND (exp, 2))
13017 return TREE_OPERAND (exp, 2);
13018
13019 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13020 substituting for a PLACEHOLDER_EXPR as needed. */
13021 if (domain_type && TYPE_MIN_VALUE (domain_type))
13022 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13023
13024 /* Otherwise, return a zero of the appropriate type. */
13025 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13026 }
13027
13028 /* Return a tree representing the upper bound of the array mentioned in
13029 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13030
13031 tree
13032 array_ref_up_bound (tree exp)
13033 {
13034 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13035
13036 /* If there is a domain type and it has an upper bound, use it, substituting
13037 for a PLACEHOLDER_EXPR as needed. */
13038 if (domain_type && TYPE_MAX_VALUE (domain_type))
13039 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13040
13041 /* Otherwise fail. */
13042 return NULL_TREE;
13043 }
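/* Illustrative sketch: for a C declaration 'int a[10]' the domain is [0, 9],
   so array_ref_low_bound yields 0 and array_ref_up_bound yields 9; for an
   incomplete 'extern int b[]' the domain is absent or has no maximum, and
   array_ref_up_bound returns NULL_TREE.  */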
13044
13045 /* Returns true if REF is an array reference to an array at the end of
13046 a structure. If this is the case, the array may be allocated larger
13047 than its upper bound implies. */
13048
13049 bool
13050 array_at_struct_end_p (tree ref)
13051 {
13052 if (TREE_CODE (ref) != ARRAY_REF
13053 && TREE_CODE (ref) != ARRAY_RANGE_REF)
13054 return false;
13055
13056 while (handled_component_p (ref))
13057 {
13058 /* If the reference chain contains a component reference to a
13059 non-union type and there follows another field, the reference
13060 is not at the end of a structure. */
13061 if (TREE_CODE (ref) == COMPONENT_REF
13062 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13063 {
13064 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13065 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13066 nextf = DECL_CHAIN (nextf);
13067 if (nextf)
13068 return false;
13069 }
13070
13071 ref = TREE_OPERAND (ref, 0);
13072 }
13073
13074 tree size = NULL;
13075
13076 if (TREE_CODE (ref) == MEM_REF
13077 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13078 {
13079 size = TYPE_SIZE (TREE_TYPE (ref));
13080 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13081 }
13082
13083 /* If the reference is based on a declared entity, the size of the array
13084 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
13085 if (DECL_P (ref)
13086 /* Be sure the size of the MEM_REF target matches. For example:
13087
13088 char buf[10];
13089 struct foo *str = (struct foo *)&buf;
13090
13091 str->trailing_array[2] = 1;
13092
13093 is valid because BUF allocates enough space. */
13094
13095 && (!size || (DECL_SIZE (ref) != NULL
13096 && operand_equal_p (DECL_SIZE (ref), size, 0)))
13097 && !(flag_unconstrained_commons
13098 && TREE_CODE (ref) == VAR_DECL && DECL_COMMON (ref)))
13099 return false;
13100
13101 return true;
13102 }
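/* Illustrative sketch:

       struct S { int n; int data[1]; } *p;
       ... p->data[i] ...

   is an array-at-struct-end reference (the access may validly run past the
   declared bound of 'data'), whereas for a declared object

       struct S s;
       ... s.data[i] ...

   the size of 's' is trusted and the function returns false.  */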
13103
13104 /* Return a tree representing the offset, in bytes, of the field referenced
13105 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13106
13107 tree
13108 component_ref_field_offset (tree exp)
13109 {
13110 tree aligned_offset = TREE_OPERAND (exp, 2);
13111 tree field = TREE_OPERAND (exp, 1);
13112 location_t loc = EXPR_LOCATION (exp);
13113
13114 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13115 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13116 value. */
13117 if (aligned_offset)
13118 {
13119 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13120 sizetype from another type of the same width and signedness. */
13121 if (TREE_TYPE (aligned_offset) != sizetype)
13122 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13123 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13124 size_int (DECL_OFFSET_ALIGN (field)
13125 / BITS_PER_UNIT));
13126 }
13127
13128 /* Otherwise, take the offset from that of the field. Substitute
13129 any PLACEHOLDER_EXPR that we have. */
13130 else
13131 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13132 }
13133
13134 /* Return the machine mode of T. For vectors, returns the mode of the
13135 inner type. The main use case is to feed the result to HONOR_NANS,
13136 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13137
13138 machine_mode
13139 element_mode (const_tree t)
13140 {
13141 if (!TYPE_P (t))
13142 t = TREE_TYPE (t);
13143 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13144 t = TREE_TYPE (t);
13145 return TYPE_MODE (t);
13146 }
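/* Illustrative sketch: for a 'float' scalar, a 'vector(4) float' and a
   'complex float' this returns SFmode on typical targets, so
   HONOR_NANS (element_mode (t)) gives the same answer for the scalar and
   for the composite types built from it.  */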
13147
13148
13149 /* Verify that basic properties of T match TV and thus T can be a variant of
13150 TV. TV should be the more specific variant (i.e. the main variant). */
13151
13152 static bool
13153 verify_type_variant (const_tree t, tree tv)
13154 {
13155 /* Type variant can differ by:
13156
13157 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13158 ENCODE_QUAL_ADDR_SPACE.
13159 - main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P;
13160 in this case some values may not be set in the variant types
13161 (see the COMPLETE_TYPE_P checks).
13162 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13163 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13164 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13165 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13166 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
13167 this is necessary to make it possible to merge types from different TUs
13168 - arrays, pointers and references may have TREE_TYPE that is a variant
13169 of the TREE_TYPE of their main variants.
13170 - aggregates may have a new TYPE_FIELDS list that lists variants of
13171 the main variant's TYPE_FIELDS.
13172 - vector types may differ by TYPE_VECTOR_OPAQUE
13173 - TYPE_METHODS is always NULL for variant types and maintained for
13174 the main variant only.
13175 */
13176
13177 /* Convenience macro for matching individual fields. */
13178 #define verify_variant_match(flag) \
13179 do { \
13180 if (flag (tv) != flag (t)) \
13181 { \
13182 error ("type variant differs by " #flag "."); \
13183 debug_tree (tv); \
13184 return false; \
13185 } \
13186 } while (false)
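  /* For instance, verify_variant_match (TYPE_PRECISION) expands to a check
     that TYPE_PRECISION (tv) == TYPE_PRECISION (t), reporting
     "type variant differs by TYPE_PRECISION." and dumping TV otherwise.  */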
13187
13188 /* tree_base checks. */
13189
13190 verify_variant_match (TREE_CODE);
13191 /* FIXME: Ada builds non-artificial variants of artificial types. */
13192 if (TYPE_ARTIFICIAL (tv) && 0)
13193 verify_variant_match (TYPE_ARTIFICIAL);
13194 if (POINTER_TYPE_P (tv))
13195 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13196 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13197 verify_variant_match (TYPE_UNSIGNED);
13198 verify_variant_match (TYPE_PACKED);
13199 if (TREE_CODE (t) == REFERENCE_TYPE)
13200 verify_variant_match (TYPE_REF_IS_RVALUE);
13201 if (AGGREGATE_TYPE_P (t))
13202 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13203 else
13204 verify_variant_match (TYPE_SATURATING);
13205 /* FIXME: This check triggers during the libstdc++ build. */
13206 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13207 verify_variant_match (TYPE_FINAL_P);
13208
13209 /* tree_type_common checks. */
13210
13211 if (COMPLETE_TYPE_P (t))
13212 {
13213 verify_variant_match (TYPE_MODE);
13214 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13215 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13216 verify_variant_match (TYPE_SIZE);
13217 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13218 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13219 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13220 /* FIXME: ideally we should compare pointer equality, but the Java FE
13221 produces variants where the size is an INTEGER_CST of a different type
13222 (int wrt size_type) during the libjava build. */
13223 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13224 {
13225 error ("type variant has different TYPE_SIZE_UNIT");
13226 debug_tree (tv);
13227 error ("type variant's TYPE_SIZE_UNIT");
13228 debug_tree (TYPE_SIZE_UNIT (tv));
13229 error ("type's TYPE_SIZE_UNIT");
13230 debug_tree (TYPE_SIZE_UNIT (t));
13231 return false;
13232 }
13233 }
13234 verify_variant_match (TYPE_PRECISION);
13235 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13236 if (RECORD_OR_UNION_TYPE_P (t))
13237 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13238 else if (TREE_CODE (t) == ARRAY_TYPE)
13239 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13240 /* During LTO we merge variant lists from different translation units
13241 that may differ by TYPE_CONTEXT, which in turn may point
13242 to TRANSLATION_UNIT_DECL.
13243 Ada also builds variants of types with different TYPE_CONTEXT. */
13244 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13245 verify_variant_match (TYPE_CONTEXT);
13246 verify_variant_match (TYPE_STRING_FLAG);
13247 if (TYPE_ALIAS_SET_KNOWN_P (t))
13248 {
13249 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13250 debug_tree (tv);
13251 return false;
13252 }
13253
13254 /* tree_type_non_common checks. */
13255
13256 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13257 and dangles the pointer from time to time. */
13258 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13259 && (in_lto_p || !TYPE_VFIELD (tv)
13260 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13261 {
13262 error ("type variant has different TYPE_VFIELD");
13263 debug_tree (tv);
13264 return false;
13265 }
13266 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13267 || TREE_CODE (t) == INTEGER_TYPE
13268 || TREE_CODE (t) == BOOLEAN_TYPE
13269 || TREE_CODE (t) == REAL_TYPE
13270 || TREE_CODE (t) == FIXED_POINT_TYPE)
13271 {
13272 verify_variant_match (TYPE_MAX_VALUE);
13273 verify_variant_match (TYPE_MIN_VALUE);
13274 }
13275 if (TREE_CODE (t) == METHOD_TYPE)
13276 verify_variant_match (TYPE_METHOD_BASETYPE);
13277 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13278 {
13279 error ("type variant has TYPE_METHODS");
13280 debug_tree (tv);
13281 return false;
13282 }
13283 if (TREE_CODE (t) == OFFSET_TYPE)
13284 verify_variant_match (TYPE_OFFSET_BASETYPE);
13285 if (TREE_CODE (t) == ARRAY_TYPE)
13286 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13287 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13288 or even type's main variant. This is needed to make bootstrap pass
13289 and the bug seems new in GCC 5.
13290 C++ FE should be updated to make this consistent and we should check
13291 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13292 is a match with main variant.
13293
13294 Also disable the check for Java for now because of a parser hack that first
13295 builds a dummy BINFO and then sometimes replaces it with the real BINFO in
13296 some of the copies. */
13297 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13298 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13299 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13300 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
13301 do the checking at LTO time only. */
13302 && (in_lto_p && odr_type_p (t)))
13303 {
13304 error ("type variant has different TYPE_BINFO");
13305 debug_tree (tv);
13306 error ("type variant's TYPE_BINFO");
13307 debug_tree (TYPE_BINFO (tv));
13308 error ("type's TYPE_BINFO");
13309 debug_tree (TYPE_BINFO (t));
13310 return false;
13311 }
13312
13313 /* Check various uses of TYPE_VALUES_RAW. */
13314 if (TREE_CODE (t) == ENUMERAL_TYPE)
13315 verify_variant_match (TYPE_VALUES);
13316 else if (TREE_CODE (t) == ARRAY_TYPE)
13317 verify_variant_match (TYPE_DOMAIN);
13318 /* Permit incomplete variants of a complete type. While FEs may complete
13319 all variants, this does not happen for C++ templates in all cases. */
13320 else if (RECORD_OR_UNION_TYPE_P (t)
13321 && COMPLETE_TYPE_P (t)
13322 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13323 {
13324 tree f1, f2;
13325
13326 /* Fortran builds qualified variants as new records with items of
13327 qualified type. Verify that they look the same. */
13328 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13329 f1 && f2;
13330 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13331 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13332 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13333 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13334 /* FIXME: gfc_nonrestricted_type builds all types as variants
13335 with the exception of pointer types. It deeply copies the type,
13336 which means that we may end up with a variant type
13337 referring to a non-variant pointer. We may change it to
13338 produce types as variants, too, like
13339 objc_get_protocol_qualified_type does. */
13340 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13341 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13342 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13343 break;
13344 if (f1 || f2)
13345 {
13346 error ("type variant has different TYPE_FIELDS");
13347 debug_tree (tv);
13348 error ("first mismatch is field");
13349 debug_tree (f1);
13350 error ("and field");
13351 debug_tree (f2);
13352 return false;
13353 }
13354 }
13355 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13356 verify_variant_match (TYPE_ARG_TYPES);
13357 /* For C++ the qualified variant of an array type is really an array type
13358 of the qualified TREE_TYPE.
13359 ObjC builds variants of pointer types where the pointed-to type is a
13360 variant too, in objc_get_protocol_qualified_type. */
13361 if (TREE_TYPE (t) != TREE_TYPE (tv)
13362 && ((TREE_CODE (t) != ARRAY_TYPE
13363 && !POINTER_TYPE_P (t))
13364 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13365 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13366 {
13367 error ("type variant has different TREE_TYPE");
13368 debug_tree (tv);
13369 error ("type variant's TREE_TYPE");
13370 debug_tree (TREE_TYPE (tv));
13371 error ("type's TREE_TYPE");
13372 debug_tree (TREE_TYPE (t));
13373 return false;
13374 }
13375 if (type_with_alias_set_p (t)
13376 && !gimple_canonical_types_compatible_p (t, tv, false))
13377 {
13378 error ("type is not compatible with its vairant");
13379 debug_tree (tv);
13380 error ("type variant's TREE_TYPE");
13381 debug_tree (TREE_TYPE (tv));
13382 error ("type's TREE_TYPE");
13383 debug_tree (TREE_TYPE (t));
13384 return false;
13385 }
13386 return true;
13387 #undef verify_variant_match
13388 }
13389
13390
13391 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13392 the middle-end types_compatible_p function. It needs to avoid
13393 claiming types are different for types that should be treated
13394 the same with respect to TBAA. Canonical types are also used
13395 for IL consistency checks via the useless_type_conversion_p
13396 predicate which does not handle all type kinds itself but falls
13397 back to pointer-comparison of TYPE_CANONICAL for aggregates
13398 for example. */
13399
13400 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13401 type calculation because we need to allow inter-operability between signed
13402 and unsigned variants. */
13403
13404 bool
13405 type_with_interoperable_signedness (const_tree type)
13406 {
13407 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13408 signed char and unsigned char. Similarly, the Fortran FE builds
13409 C_SIZE_T as a signed type, while C defines it as unsigned. */
13410
13411 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13412 == INTEGER_TYPE
13413 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13414 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13415 }
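/* Illustrative sketch: 'unsigned char' has the same precision as
   signed_char_type_node, so it satisfies this predicate and a signedness
   mismatch against 'signed char' is not treated as a canonical-type
   incompatibility (needed for Fortran's C_SIGNED_CHAR).  */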
13416
13417 /* Return true iff T1 and T2 are structurally identical as far as
13418 TBAA is concerned.
13419 This function is used both by lto.c canonical type merging and by the
13420 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13421 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13422 useful only for LTO because only there does TYPE_CANONICAL equivalence
13423 correspond to the one defined by gimple_canonical_types_compatible_p. */
13424
13425 bool
13426 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13427 bool trust_type_canonical)
13428 {
13429 /* Type variants should be the same as the main variant. When not doing sanity
13430 checking to verify this fact, go to main variants and save some work. */
13431 if (trust_type_canonical)
13432 {
13433 t1 = TYPE_MAIN_VARIANT (t1);
13434 t2 = TYPE_MAIN_VARIANT (t2);
13435 }
13436
13437 /* Check first for the obvious case of pointer identity. */
13438 if (t1 == t2)
13439 return true;
13440
13441 /* Check that we have two types to compare. */
13442 if (t1 == NULL_TREE || t2 == NULL_TREE)
13443 return false;
13444
13445 /* We consider complete types always compatible with incomplete types.
13446 This does not make sense for canonical type calculation and thus we
13447 need to ensure that we are never called on it.
13448
13449 FIXME: For more correctness the function probably should have three modes
13450 1) mode assuming that types are complete, matching their structure
13451 2) mode allowing incomplete types but producing equivalence classes
13452 and thus ignoring all info from complete types
13453 3) mode allowing incomplete types to match complete but checking
13454 compatibility between complete types.
13455
13456 1 and 2 can be used for canonical type calculation. 3 is the real
13457 definition of type compatibility that can be used e.g. for warnings during
13458 declaration merging. */
13459
13460 gcc_assert (!trust_type_canonical
13461 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13462 /* If the types have been previously registered and found equal
13463 they still are. */
13464
13465 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13466 && trust_type_canonical)
13467 {
13468 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13469 it is always NULL, but it is set to non-NULL for types
13470 constructed by build_pointer_type and variants. In this case
13471 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13472 all pointers are considered equal). Be sure not to return false
13473 negatives. */
13474 gcc_checking_assert (canonical_type_used_p (t1)
13475 && canonical_type_used_p (t2));
13476 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13477 }
13478
13479 /* Can't be the same type if the types don't have the same code. */
13480 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13481 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13482 return false;
13483
13484 /* Qualifiers do not matter for canonical type comparison purposes. */
13485
13486 /* Void types and nullptr types are always the same. */
13487 if (TREE_CODE (t1) == VOID_TYPE
13488 || TREE_CODE (t1) == NULLPTR_TYPE)
13489 return true;
13490
13491 /* Can't be the same type if they have different mode. */
13492 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13493 return false;
13494
13495 /* Non-aggregate types can be handled cheaply. */
13496 if (INTEGRAL_TYPE_P (t1)
13497 || SCALAR_FLOAT_TYPE_P (t1)
13498 || FIXED_POINT_TYPE_P (t1)
13499 || TREE_CODE (t1) == VECTOR_TYPE
13500 || TREE_CODE (t1) == COMPLEX_TYPE
13501 || TREE_CODE (t1) == OFFSET_TYPE
13502 || POINTER_TYPE_P (t1))
13503 {
13504 /* Can't be the same type if they have different precision. */
13505 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13506 return false;
13507
13508 /* In some cases the signed and unsigned types are required to be
13509 inter-operable. */
13510 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13511 && !type_with_interoperable_signedness (t1))
13512 return false;
13513
13514 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13515 interoperable with "signed char". Unless all frontends are revisited
13516 to agree on these types, we must ignore the flag completely. */
13517
13518 /* The Fortran standard defines the C_PTR type, which is compatible with
13519 every C pointer. For this reason we need to glob all pointers into one.
13520 Still, pointers in different address spaces are not compatible. */
13521 if (POINTER_TYPE_P (t1))
13522 {
13523 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13524 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13525 return false;
13526 }
13527
13528 /* Tail-recurse to components. */
13529 if (TREE_CODE (t1) == VECTOR_TYPE
13530 || TREE_CODE (t1) == COMPLEX_TYPE)
13531 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13532 TREE_TYPE (t2),
13533 trust_type_canonical);
13534
13535 return true;
13536 }
13537
13538 /* Do type-specific comparisons. */
13539 switch (TREE_CODE (t1))
13540 {
13541 case ARRAY_TYPE:
13542 /* Array types are the same if the element types are the same and
13543 the number of elements is the same. */
13544 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13545 trust_type_canonical)
13546 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13547 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13548 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13549 return false;
13550 else
13551 {
13552 tree i1 = TYPE_DOMAIN (t1);
13553 tree i2 = TYPE_DOMAIN (t2);
13554
13555 /* For an incomplete external array, the type domain can be
13556 NULL_TREE. Check this condition also. */
13557 if (i1 == NULL_TREE && i2 == NULL_TREE)
13558 return true;
13559 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13560 return false;
13561 else
13562 {
13563 tree min1 = TYPE_MIN_VALUE (i1);
13564 tree min2 = TYPE_MIN_VALUE (i2);
13565 tree max1 = TYPE_MAX_VALUE (i1);
13566 tree max2 = TYPE_MAX_VALUE (i2);
13567
13568 /* The minimum/maximum values have to be the same. */
13569 if ((min1 == min2
13570 || (min1 && min2
13571 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13572 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13573 || operand_equal_p (min1, min2, 0))))
13574 && (max1 == max2
13575 || (max1 && max2
13576 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13577 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13578 || operand_equal_p (max1, max2, 0)))))
13579 return true;
13580 else
13581 return false;
13582 }
13583 }
13584
13585 case METHOD_TYPE:
13586 case FUNCTION_TYPE:
13587 /* Function types are the same if the return type and argument types
13588 are the same. */
13589 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13590 trust_type_canonical))
13591 return false;
13592
13593 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13594 return true;
13595 else
13596 {
13597 tree parms1, parms2;
13598
13599 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13600 parms1 && parms2;
13601 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13602 {
13603 if (!gimple_canonical_types_compatible_p
13604 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13605 trust_type_canonical))
13606 return false;
13607 }
13608
13609 if (parms1 || parms2)
13610 return false;
13611
13612 return true;
13613 }
13614
13615 case RECORD_TYPE:
13616 case UNION_TYPE:
13617 case QUAL_UNION_TYPE:
13618 {
13619 tree f1, f2;
13620
13621 /* Don't try to compare variants of an incomplete type, before
13622 TYPE_FIELDS has been copied around. */
13623 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13624 return true;
13625
13626
13627 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13628 return false;
13629
13630 /* For aggregate types, all the fields must be the same. */
13631 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13632 f1 || f2;
13633 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13634 {
13635 /* Skip non-fields. */
13636 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13637 f1 = TREE_CHAIN (f1);
13638 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13639 f2 = TREE_CHAIN (f2);
13640 if (!f1 || !f2)
13641 break;
13642 /* The fields must have the same name, offset and type. */
13643 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13644 || !gimple_compare_field_offset (f1, f2)
13645 || !gimple_canonical_types_compatible_p
13646 (TREE_TYPE (f1), TREE_TYPE (f2),
13647 trust_type_canonical))
13648 return false;
13649 }
13650
13651 /* If one aggregate has more fields than the other, they
13652 are not the same. */
13653 if (f1 || f2)
13654 return false;
13655
13656 return true;
13657 }
13658
13659 default:
13660 /* Consider all types with language specific trees in them mutually
13661 compatible. This is executed only from verify_type and false
13662 positives can be tolerated. */
13663 gcc_assert (!in_lto_p);
13664 return true;
13665 }
13666 }
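/* Illustrative examples (sketch): when called with TRUST_TYPE_CANONICAL
   false, as the verifier does, 'int *' and 'float *' compare as compatible
   because all pointers within one address space are globbed together, while
   'int' and 'float' do not, since their tree codes differ for canonical type
   merging.  Likewise 'int[10]' and 'int[20]' are incompatible because their
   domains differ.  */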
13667
13668 /* Verify type T. */
13669
13670 void
13671 verify_type (const_tree t)
13672 {
13673 bool error_found = false;
13674 tree mv = TYPE_MAIN_VARIANT (t);
13675 if (!mv)
13676 {
13677 error ("Main variant is not defined");
13678 error_found = true;
13679 }
13680 else if (mv != TYPE_MAIN_VARIANT (mv))
13681 {
13682 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13683 debug_tree (mv);
13684 error_found = true;
13685 }
13686 else if (t != mv && !verify_type_variant (t, mv))
13687 error_found = true;
13688
13689 tree ct = TYPE_CANONICAL (t);
13690 if (!ct)
13691 ;
13692 else if (TYPE_CANONICAL (t) != ct)
13693 {
13694 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13695 debug_tree (ct);
13696 error_found = true;
13697 }
13698 /* Method and function types cannot be used to address memory and thus
13699 TYPE_CANONICAL really matters only for determining useless conversions.
13700
13701 FIXME: The C++ FE produces declarations of builtin functions that are not
13702 compatible with main variants. */
13703 else if (TREE_CODE (t) == FUNCTION_TYPE)
13704 ;
13705 else if (t != ct
13706 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13707 with variably sized arrays because their sizes may have been
13708 gimplified to different variables. */
13709 && !variably_modified_type_p (ct, NULL)
13710 && !gimple_canonical_types_compatible_p (t, ct, false))
13711 {
13712 error ("TYPE_CANONICAL is not compatible");
13713 debug_tree (ct);
13714 error_found = true;
13715 }
13716
13717 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13718 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13719 {
13720 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13721 debug_tree (ct);
13722 error_found = true;
13723 }
13724 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13725 FUNCTION_*_QUALIFIED flags are set. */
13726 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13727 {
13728 error ("TYPE_CANONICAL of main variant is not main variant");
13729 debug_tree (ct);
13730 debug_tree (TYPE_MAIN_VARIANT (ct));
13731 error_found = true;
13732 }
13733
13734
13735 /* Check various uses of TYPE_MINVAL. */
13736 if (RECORD_OR_UNION_TYPE_P (t))
13737 {
13738 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13739 and dangles the pointer from time to time. */
13740 if (TYPE_VFIELD (t)
13741 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13742 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13743 {
13744 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13745 debug_tree (TYPE_VFIELD (t));
13746 error_found = true;
13747 }
13748 }
13749 else if (TREE_CODE (t) == POINTER_TYPE)
13750 {
13751 if (TYPE_NEXT_PTR_TO (t)
13752 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13753 {
13754 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13755 debug_tree (TYPE_NEXT_PTR_TO (t));
13756 error_found = true;
13757 }
13758 }
13759 else if (TREE_CODE (t) == REFERENCE_TYPE)
13760 {
13761 if (TYPE_NEXT_REF_TO (t)
13762 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13763 {
13764 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13765 debug_tree (TYPE_NEXT_REF_TO (t));
13766 error_found = true;
13767 }
13768 }
13769 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13770 || TREE_CODE (t) == FIXED_POINT_TYPE)
13771 {
13772 /* FIXME: The following check should pass:
13773 useless_type_conversion_p (const_cast <tree> (t),
13774 TREE_TYPE (TYPE_MIN_VALUE (t))
13775 but does not for C sizetypes in LTO. */
13776 }
13777 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13778 else if (TYPE_MINVAL (t)
13779 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13780 || in_lto_p))
13781 {
13782 error ("TYPE_MINVAL non-NULL");
13783 debug_tree (TYPE_MINVAL (t));
13784 error_found = true;
13785 }
13786
13787 /* Check various uses of TYPE_MAXVAL. */
13788 if (RECORD_OR_UNION_TYPE_P (t))
13789 {
13790 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13791 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13792 && TYPE_METHODS (t) != error_mark_node)
13793 {
13794 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13795 debug_tree (TYPE_METHODS (t));
13796 error_found = true;
13797 }
13798 }
13799 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13800 {
13801 if (TYPE_METHOD_BASETYPE (t)
13802 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13803 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13804 {
13805 error ("TYPE_METHOD_BASETYPE is not record nor union");
13806 debug_tree (TYPE_METHOD_BASETYPE (t));
13807 error_found = true;
13808 }
13809 }
13810 else if (TREE_CODE (t) == OFFSET_TYPE)
13811 {
13812 if (TYPE_OFFSET_BASETYPE (t)
13813 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13814 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13815 {
13816 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13817 debug_tree (TYPE_OFFSET_BASETYPE (t));
13818 error_found = true;
13819 }
13820 }
13821 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13822 || TREE_CODE (t) == FIXED_POINT_TYPE)
13823 {
13824 /* FIXME: The following check should pass:
13825 useless_type_conversion_p (const_cast <tree> (t),
13826 TREE_TYPE (TYPE_MAX_VALUE (t))
13827 but does not for C sizetypes in LTO. */
13828 }
13829 else if (TREE_CODE (t) == ARRAY_TYPE)
13830 {
13831 if (TYPE_ARRAY_MAX_SIZE (t)
13832 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13833 {
13834 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13835 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13836 error_found = true;
13837 }
13838 }
13839 else if (TYPE_MAXVAL (t))
13840 {
13841 error ("TYPE_MAXVAL non-NULL");
13842 debug_tree (TYPE_MAXVAL (t));
13843 error_found = true;
13844 }
13845
13846 /* Check various uses of TYPE_BINFO. */
13847 if (RECORD_OR_UNION_TYPE_P (t))
13848 {
13849 if (!TYPE_BINFO (t))
13850 ;
13851 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13852 {
13853 error ("TYPE_BINFO is not TREE_BINFO");
13854 debug_tree (TYPE_BINFO (t));
13855 error_found = true;
13856 }
13857 /* FIXME: Java builds invalid empty binfos that do not have
13858 TREE_TYPE set. */
13859 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13860 {
13861 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13862 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13863 error_found = true;
13864 }
13865 }
13866 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13867 {
13868 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13869 debug_tree (TYPE_LANG_SLOT_1 (t));
13870 error_found = true;
13871 }
13872
13873 /* Check various uses of TYPE_VALUES_RAW. */
13874 if (TREE_CODE (t) == ENUMERAL_TYPE)
13875 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13876 {
13877 tree value = TREE_VALUE (l);
13878 tree name = TREE_PURPOSE (l);
13879
13880 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
13881 CONST_DECLs of ENUMERAL_TYPE. */
13882 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13883 {
13884 error ("Enum value is not CONST_DECL or INTEGER_CST");
13885 debug_tree (value);
13886 debug_tree (name);
13887 error_found = true;
13888 }
13889 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13890 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13891 {
13892 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13893 debug_tree (value);
13894 debug_tree (name);
13895 error_found = true;
13896 }
13897 if (TREE_CODE (name) != IDENTIFIER_NODE)
13898 {
13899 error ("Enum value name is not IDENTIFIER_NODE");
13900 debug_tree (value);
13901 debug_tree (name);
13902 error_found = true;
13903 }
13904 }
13905 else if (TREE_CODE (t) == ARRAY_TYPE)
13906 {
13907 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13908 {
13909 error ("Array TYPE_DOMAIN is not integer type");
13910 debug_tree (TYPE_DOMAIN (t));
13911 error_found = true;
13912 }
13913 }
13914 else if (RECORD_OR_UNION_TYPE_P (t))
13915 {
13916 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13917 {
13918 error ("TYPE_FIELDS defined in incomplete type");
13919 error_found = true;
13920 }
13921 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13922 {
13923 /* TODO: verify properties of decls. */
13924 if (TREE_CODE (fld) == FIELD_DECL)
13925 ;
13926 else if (TREE_CODE (fld) == TYPE_DECL)
13927 ;
13928 else if (TREE_CODE (fld) == CONST_DECL)
13929 ;
13930 else if (TREE_CODE (fld) == VAR_DECL)
13931 ;
13932 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13933 ;
13934 else if (TREE_CODE (fld) == USING_DECL)
13935 ;
13936 else
13937 {
13938 error ("Wrong tree in TYPE_FIELDS list");
13939 debug_tree (fld);
13940 error_found = true;
13941 }
13942 }
13943 }
13944 else if (TREE_CODE (t) == INTEGER_TYPE
13945 || TREE_CODE (t) == BOOLEAN_TYPE
13946 || TREE_CODE (t) == OFFSET_TYPE
13947 || TREE_CODE (t) == REFERENCE_TYPE
13948 || TREE_CODE (t) == NULLPTR_TYPE
13949 || TREE_CODE (t) == POINTER_TYPE)
13950 {
13951 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13952 {
13953 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13954 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13955 error_found = true;
13956 }
13957 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13958 {
13959 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13960 debug_tree (TYPE_CACHED_VALUES (t));
13961 error_found = true;
13962 }
13963 /* Verify just enough of the cache to ensure that no one copied it to a new
13964 type. All copying should go through copy_node, which should clear it. */
13965 else if (TYPE_CACHED_VALUES_P (t))
13966 {
13967 int i;
13968 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13969 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13970 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13971 {
13972 error ("wrong TYPE_CACHED_VALUES entry");
13973 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13974 error_found = true;
13975 break;
13976 }
13977 }
13978 }
13979 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13980 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13981 {
13982 /* C++ FE uses TREE_PURPOSE to store initial values. */
13983 if (TREE_PURPOSE (l) && in_lto_p)
13984 {
13985 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13986 debug_tree (l);
13987 error_found = true;
13988 }
13989 if (!TYPE_P (TREE_VALUE (l)))
13990 {
13991 error ("Wrong entry in TYPE_ARG_TYPES list");
13992 debug_tree (l);
13993 error_found = true;
13994 }
13995 }
13996 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13997 {
13998 error ("TYPE_VALUES_RAW field is non-NULL");
13999 debug_tree (TYPE_VALUES_RAW (t));
14000 error_found = true;
14001 }
14002 if (TREE_CODE (t) != INTEGER_TYPE
14003 && TREE_CODE (t) != BOOLEAN_TYPE
14004 && TREE_CODE (t) != OFFSET_TYPE
14005 && TREE_CODE (t) != REFERENCE_TYPE
14006 && TREE_CODE (t) != NULLPTR_TYPE
14007 && TREE_CODE (t) != POINTER_TYPE
14008 && TYPE_CACHED_VALUES_P (t))
14009 {
14010 error ("TYPE_CACHED_VALUES_P is set while it should not");
14011 error_found = true;
14012 }
14013 if (TYPE_STRING_FLAG (t)
14014 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
14015 {
14016 error ("TYPE_STRING_FLAG is set on wrong type code");
14017 error_found = true;
14018 }
14019 else if (TYPE_STRING_FLAG (t))
14020 {
14021 const_tree b = t;
14022 if (TREE_CODE (b) == ARRAY_TYPE)
14023 b = TREE_TYPE (t);
14024 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
14025 which is 32 bits. */
14026 if (TREE_CODE (b) != INTEGER_TYPE)
14027 {
14028 error ("TYPE_STRING_FLAG is set on type that does not look like "
14029 "char nor array of chars");
14030 error_found = true;
14031 }
14032 }
14033
14034 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always a
14035 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14036 of a type. */
14037 if (TREE_CODE (t) == METHOD_TYPE
14038 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14039 {
14040 error ("TYPE_METHOD_BASETYPE is not main variant");
14041 error_found = true;
14042 }
14043
14044 if (error_found)
14045 {
14046 debug_tree (const_cast <tree> (t));
14047 internal_error ("verify_type failed");
14048 }
14049 }
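/* Illustrative sketch: callers normally guard this with

       if (flag_checking)
         verify_type (t);

   since a failure ends in internal_error and aborts the compiler.  */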
14050
14051
14052 /* Return true if ARG is marked with the nonnull attribute in the
14053 current function signature. */
14054
14055 bool
14056 nonnull_arg_p (const_tree arg)
14057 {
14058 tree t, attrs, fntype;
14059 unsigned HOST_WIDE_INT arg_num;
14060
14061 gcc_assert (TREE_CODE (arg) == PARM_DECL
14062 && (POINTER_TYPE_P (TREE_TYPE (arg))
14063 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14064
14065 /* The static chain decl is always non-null. */
14066 if (arg == cfun->static_chain_decl)
14067 return true;
14068
14069 /* The THIS argument of a method is always non-NULL. */
14070 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14071 && arg == DECL_ARGUMENTS (cfun->decl)
14072 && flag_delete_null_pointer_checks)
14073 return true;
14074
14075 /* Values passed by reference are always non-NULL. */
14076 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14077 && flag_delete_null_pointer_checks)
14078 return true;
14079
14080 fntype = TREE_TYPE (cfun->decl);
14081 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14082 {
14083 attrs = lookup_attribute ("nonnull", attrs);
14084
14085 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14086 if (attrs == NULL_TREE)
14087 return false;
14088
14089 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14090 if (TREE_VALUE (attrs) == NULL_TREE)
14091 return true;
14092
14093 /* Get the position number for ARG in the function signature. */
14094 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14095 t;
14096 t = DECL_CHAIN (t), arg_num++)
14097 {
14098 if (t == arg)
14099 break;
14100 }
14101
14102 gcc_assert (t == arg);
14103
14104 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14105 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14106 {
14107 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14108 return true;
14109 }
14110 }
14111
14112 return false;
14113 }
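/* Illustrative sketch: given a current function declared as

       void f (char *a, char *b) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns true for the PARM_DECL of 'b' (argument 2 is listed)
   and false for 'a', unless some other rule applies, such as the argument
   being the static chain or a C++ reference.  */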
14114
14115 /* Given location LOC, strip away any packed range information
14116 or ad-hoc information. */
14117
14118 location_t
14119 get_pure_location (location_t loc)
14120 {
14121 if (IS_ADHOC_LOC (loc))
14122 loc
14123 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
14124
14125 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
14126 return loc;
14127
14128 if (loc < RESERVED_LOCATION_COUNT)
14129 return loc;
14130
14131 const line_map *map = linemap_lookup (line_table, loc);
14132 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
14133
14134 return loc & ~((1 << ordmap->m_range_bits) - 1);
14135 }
14136
14137 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14138 information. */
14139
14140 location_t
14141 set_block (location_t loc, tree block)
14142 {
14143 location_t pure_loc = get_pure_location (loc);
14144 source_range src_range = get_range_from_loc (line_table, loc);
14145 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14146 }
14147
14148 location_t
14149 set_source_range (tree expr, location_t start, location_t finish)
14150 {
14151 source_range src_range;
14152 src_range.m_start = start;
14153 src_range.m_finish = finish;
14154 return set_source_range (expr, src_range);
14155 }
14156
14157 location_t
14158 set_source_range (tree expr, source_range src_range)
14159 {
14160 if (!EXPR_P (expr))
14161 return UNKNOWN_LOCATION;
14162
14163 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14164 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14165 pure_loc,
14166 src_range,
14167 NULL);
14168 SET_EXPR_LOCATION (expr, adhoc);
14169 return adhoc;
14170 }
14171
14172 location_t
14173 make_location (location_t caret, location_t start, location_t finish)
14174 {
14175 location_t pure_loc = get_pure_location (caret);
14176 source_range src_range;
14177 src_range.m_start = start;
14178 src_range.m_finish = finish;
14179 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
14180 pure_loc,
14181 src_range,
14182 NULL);
14183 return combined_loc;
14184 }
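/* Illustrative sketch: when a front end parses 'x + y' it can build the
   location of the resulting expression as

       location_t loc = make_location (operator_loc, x_loc, y_loc);

   giving diagnostics a caret on the operator and a range spanning both
   operands; operator_loc, x_loc and y_loc are hypothetical variables.
   set_source_range and set_block layer range and block information onto an
   existing location in the same way.  */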
14185
14186 /* Return the name of combined function FN, for debugging purposes. */
14187
14188 const char *
14189 combined_fn_name (combined_fn fn)
14190 {
14191 if (builtin_fn_p (fn))
14192 {
14193 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14194 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14195 }
14196 else
14197 return internal_fn_name (as_internal_fn (fn));
14198 }
14199
14200 #if CHECKING_P
14201
14202 namespace selftest {
14203
14204 /* Selftests for tree. */
14205
14206 /* Verify that integer constants are sane. */
14207
14208 static void
14209 test_integer_constants ()
14210 {
14211 ASSERT_TRUE (integer_type_node != NULL);
14212 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14213
14214 tree type = integer_type_node;
14215
14216 tree zero = build_zero_cst (type);
14217 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14218 ASSERT_EQ (type, TREE_TYPE (zero));
14219
14220 tree one = build_int_cst (type, 1);
14221 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14222 ASSERT_EQ (type, TREE_TYPE (one));
14223 }
14224
14225 /* Verify identifiers. */
14226
14227 static void
14228 test_identifiers ()
14229 {
14230 tree identifier = get_identifier ("foo");
14231 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14232 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14233 }
14234
14235 /* Verify LABEL_DECL. */
14236
14237 static void
14238 test_labels ()
14239 {
14240 tree identifier = get_identifier ("err");
14241 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14242 identifier, void_type_node);
14243 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14244 ASSERT_FALSE (FORCED_LABEL (label_decl));
14245 }
14246
14247 /* Run all of the selftests within this file. */
14248
14249 void
14250 tree_c_tests ()
14251 {
14252 test_integer_constants ();
14253 test_identifiers ();
14254 test_labels ();
14255 }
14256
14257 } // namespace selftest
14258
14259 #endif /* CHECKING_P */
14260
14261 #include "gt-tree.h"