gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "input.h"
54 #include "function.h"
55 #include "obstack.h"
56 #include "toplev.h" /* get_random_seed */
57 #include "inchash.h"
58 #include "filenames.h"
59 #include "output.h"
60 #include "target.h"
61 #include "common/common-target.h"
62 #include "langhooks.h"
63 #include "tree-inline.h"
64 #include "tree-iterator.h"
65 #include "predict.h"
66 #include "dominance.h"
67 #include "cfg.h"
68 #include "basic-block.h"
69 #include "bitmap.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "gimple-expr.h"
73 #include "is-a.h"
74 #include "gimple.h"
75 #include "gimple-iterator.h"
76 #include "gimplify.h"
77 #include "gimple-ssa.h"
78 #include "hash-map.h"
79 #include "plugin-api.h"
80 #include "ipa-ref.h"
81 #include "cgraph.h"
82 #include "tree-phinodes.h"
83 #include "stringpool.h"
84 #include "tree-ssanames.h"
85 #include "expr.h"
86 #include "tree-dfa.h"
87 #include "params.h"
88 #include "tree-pass.h"
89 #include "langhooks-def.h"
90 #include "diagnostic.h"
91 #include "tree-diagnostic.h"
92 #include "tree-pretty-print.h"
93 #include "except.h"
94 #include "debug.h"
95 #include "intl.h"
96 #include "wide-int.h"
97 #include "builtins.h"
98
99 /* Tree code classes. */
100
101 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
102 #define END_OF_BASE_TREE_CODES tcc_exceptional,
103
104 const enum tree_code_class tree_code_type[] = {
105 #include "all-tree.def"
106 };
107
108 #undef DEFTREECODE
109 #undef END_OF_BASE_TREE_CODES
110
111 /* Table indexed by tree code giving number of expression
112 operands beyond the fixed part of the node structure.
113 Not used for types or decls. */
114
115 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
116 #define END_OF_BASE_TREE_CODES 0,
117
118 const unsigned char tree_code_length[] = {
119 #include "all-tree.def"
120 };
121
122 #undef DEFTREECODE
123 #undef END_OF_BASE_TREE_CODES
124
125 /* Names of tree components.
126 Used for printing out the tree and error messages. */
127 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
128 #define END_OF_BASE_TREE_CODES "@dummy",
129
130 static const char *const tree_code_name[] = {
131 #include "all-tree.def"
132 };
133
134 #undef DEFTREECODE
135 #undef END_OF_BASE_TREE_CODES
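/* Illustrative note (editorial sketch): the three tables above are produced by
   expanding all-tree.def with different definitions of DEFTREECODE.  For
   example, the tree.def entry

       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes tcc_binary to tree_code_type, 2 to tree_code_length and
   "plus_expr" to tree_code_name, all at index PLUS_EXPR.  */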
136
137 /* Each tree code class has an associated string representation.
138 These must correspond to the tree_code_class entries. */
139
140 const char *const tree_code_class_strings[] =
141 {
142 "exceptional",
143 "constant",
144 "type",
145 "declaration",
146 "reference",
147 "comparison",
148 "unary",
149 "binary",
150 "statement",
151 "vl_exp",
152 "expression"
153 };
154
155 /* obstack.[ch] explicitly declined to prototype this. */
156 extern int _obstack_allocated_p (struct obstack *h, void *obj);
157
158 /* Statistics-gathering stuff. */
159
160 static int tree_code_counts[MAX_TREE_CODES];
161 int tree_node_counts[(int) all_kinds];
162 int tree_node_sizes[(int) all_kinds];
163
164 /* Keep in sync with tree.h:enum tree_node_kind. */
165 static const char * const tree_node_kind_names[] = {
166 "decls",
167 "types",
168 "blocks",
169 "stmts",
170 "refs",
171 "exprs",
172 "constants",
173 "identifiers",
174 "vecs",
175 "binfos",
176 "ssa names",
177 "constructors",
178 "random kinds",
179 "lang_decl kinds",
180 "lang_type kinds",
181 "omp clauses",
182 };
183
184 /* Unique id for next decl created. */
185 static GTY(()) int next_decl_uid;
186 /* Unique id for next type created. */
187 static GTY(()) int next_type_uid = 1;
188 /* Unique id for next debug decl created. Use negative numbers,
189 to catch erroneous uses. */
190 static GTY(()) int next_debug_decl_uid;
191
192 /* Since we cannot rehash a type after it is in the table, we have to
193 keep the hash code. */
194
195 struct GTY((for_user)) type_hash {
196 unsigned long hash;
197 tree type;
198 };
199
200 /* Initial size of the hash table (rounded to next prime). */
201 #define TYPE_HASH_INITIAL_SIZE 1000
202
203 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
204 {
205 static hashval_t hash (type_hash *t) { return t->hash; }
206 static bool equal (type_hash *a, type_hash *b);
207
208 static void
209 handle_cache_entry (type_hash *&t)
210 {
211 extern void gt_ggc_mx (type_hash *&);
212 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
213 return;
214 else if (ggc_marked_p (t->type))
215 gt_ggc_mx (t);
216 else
217 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
218 }
219 };
220
221 /* Now here is the hash table. When recording a type, it is added to
222 the slot whose index is the hash code. Note that the hash table is
223 used for several kinds of types (function types, array types and
224 array index range types, for now). While all these live in the
225 same table, they are completely independent, and the hash code is
226 computed differently for each of these. */
227
228 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
229
230 /* Hash table and temporary node for larger integer const values. */
231 static GTY (()) tree int_cst_node;
232
233 struct int_cst_hasher : ggc_cache_hasher<tree>
234 {
235 static hashval_t hash (tree t);
236 static bool equal (tree x, tree y);
237 };
238
239 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
240
241 /* Hash table for optimization flags and target option flags. Use the same
242 hash table for both sets of options. Nodes for building the current
243 optimization and target option nodes. The assumption is most of the time
244 the options created will already be in the hash table, so we avoid
245 allocating and freeing up a node repeatedly. */
246 static GTY (()) tree cl_optimization_node;
247 static GTY (()) tree cl_target_option_node;
248
249 struct cl_option_hasher : ggc_cache_hasher<tree>
250 {
251 static hashval_t hash (tree t);
252 static bool equal (tree x, tree y);
253 };
254
255 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
256
257 /* General tree->tree mapping structure for use in hash tables. */
258
259
260 static GTY ((cache))
261 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
262
263 static GTY ((cache))
264 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
265
266 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
267 {
268 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
269
270 static bool
271 equal (tree_vec_map *a, tree_vec_map *b)
272 {
273 return a->base.from == b->base.from;
274 }
275
276 static void
277 handle_cache_entry (tree_vec_map *&m)
278 {
279 extern void gt_ggc_mx (tree_vec_map *&);
280 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
281 return;
282 else if (ggc_marked_p (m->base.from))
283 gt_ggc_mx (m);
284 else
285 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
286 }
287 };
288
289 static GTY ((cache))
290 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
291
292 static void set_type_quals (tree, int);
293 static void print_type_hash_statistics (void);
294 static void print_debug_expr_statistics (void);
295 static void print_value_expr_statistics (void);
296 static void type_hash_list (const_tree, inchash::hash &);
297 static void attribute_hash_list (const_tree, inchash::hash &);
298
299 tree global_trees[TI_MAX];
300 tree integer_types[itk_none];
301
302 bool int_n_enabled_p[NUM_INT_N_ENTS];
303 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
304
305 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
306
307 /* Number of operands for each OpenMP clause. */
308 unsigned const char omp_clause_num_ops[] =
309 {
310 0, /* OMP_CLAUSE_ERROR */
311 1, /* OMP_CLAUSE_PRIVATE */
312 1, /* OMP_CLAUSE_SHARED */
313 1, /* OMP_CLAUSE_FIRSTPRIVATE */
314 2, /* OMP_CLAUSE_LASTPRIVATE */
315 4, /* OMP_CLAUSE_REDUCTION */
316 1, /* OMP_CLAUSE_COPYIN */
317 1, /* OMP_CLAUSE_COPYPRIVATE */
318 3, /* OMP_CLAUSE_LINEAR */
319 2, /* OMP_CLAUSE_ALIGNED */
320 1, /* OMP_CLAUSE_DEPEND */
321 1, /* OMP_CLAUSE_UNIFORM */
322 2, /* OMP_CLAUSE_FROM */
323 2, /* OMP_CLAUSE_TO */
324 2, /* OMP_CLAUSE_MAP */
325 1, /* OMP_CLAUSE__LOOPTEMP_ */
326 1, /* OMP_CLAUSE_IF */
327 1, /* OMP_CLAUSE_NUM_THREADS */
328 1, /* OMP_CLAUSE_SCHEDULE */
329 0, /* OMP_CLAUSE_NOWAIT */
330 0, /* OMP_CLAUSE_ORDERED */
331 0, /* OMP_CLAUSE_DEFAULT */
332 3, /* OMP_CLAUSE_COLLAPSE */
333 0, /* OMP_CLAUSE_UNTIED */
334 1, /* OMP_CLAUSE_FINAL */
335 0, /* OMP_CLAUSE_MERGEABLE */
336 1, /* OMP_CLAUSE_DEVICE */
337 1, /* OMP_CLAUSE_DIST_SCHEDULE */
338 0, /* OMP_CLAUSE_INBRANCH */
339 0, /* OMP_CLAUSE_NOTINBRANCH */
340 1, /* OMP_CLAUSE_NUM_TEAMS */
341 1, /* OMP_CLAUSE_THREAD_LIMIT */
342 0, /* OMP_CLAUSE_PROC_BIND */
343 1, /* OMP_CLAUSE_SAFELEN */
344 1, /* OMP_CLAUSE_SIMDLEN */
345 0, /* OMP_CLAUSE_FOR */
346 0, /* OMP_CLAUSE_PARALLEL */
347 0, /* OMP_CLAUSE_SECTIONS */
348 0, /* OMP_CLAUSE_TASKGROUP */
349 1, /* OMP_CLAUSE__SIMDUID_ */
350 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
351 };
352
353 const char * const omp_clause_code_name[] =
354 {
355 "error_clause",
356 "private",
357 "shared",
358 "firstprivate",
359 "lastprivate",
360 "reduction",
361 "copyin",
362 "copyprivate",
363 "linear",
364 "aligned",
365 "depend",
366 "uniform",
367 "from",
368 "to",
369 "map",
370 "_looptemp_",
371 "if",
372 "num_threads",
373 "schedule",
374 "nowait",
375 "ordered",
376 "default",
377 "collapse",
378 "untied",
379 "final",
380 "mergeable",
381 "device",
382 "dist_schedule",
383 "inbranch",
384 "notinbranch",
385 "num_teams",
386 "thread_limit",
387 "proc_bind",
388 "safelen",
389 "simdlen",
390 "for",
391 "parallel",
392 "sections",
393 "taskgroup",
394 "_simduid_",
395 "_Cilk_for_count_"
396 };
397
398
399 /* Return the tree node structure used by tree code CODE. */
400
401 static inline enum tree_node_structure_enum
402 tree_node_structure_for_code (enum tree_code code)
403 {
404 switch (TREE_CODE_CLASS (code))
405 {
406 case tcc_declaration:
407 {
408 switch (code)
409 {
410 case FIELD_DECL:
411 return TS_FIELD_DECL;
412 case PARM_DECL:
413 return TS_PARM_DECL;
414 case VAR_DECL:
415 return TS_VAR_DECL;
416 case LABEL_DECL:
417 return TS_LABEL_DECL;
418 case RESULT_DECL:
419 return TS_RESULT_DECL;
420 case DEBUG_EXPR_DECL:
421 return TS_DECL_WRTL;
422 case CONST_DECL:
423 return TS_CONST_DECL;
424 case TYPE_DECL:
425 return TS_TYPE_DECL;
426 case FUNCTION_DECL:
427 return TS_FUNCTION_DECL;
428 case TRANSLATION_UNIT_DECL:
429 return TS_TRANSLATION_UNIT_DECL;
430 default:
431 return TS_DECL_NON_COMMON;
432 }
433 }
434 case tcc_type:
435 return TS_TYPE_NON_COMMON;
436 case tcc_reference:
437 case tcc_comparison:
438 case tcc_unary:
439 case tcc_binary:
440 case tcc_expression:
441 case tcc_statement:
442 case tcc_vl_exp:
443 return TS_EXP;
444 default: /* tcc_constant and tcc_exceptional */
445 break;
446 }
447 switch (code)
448 {
449 /* tcc_constant cases. */
450 case VOID_CST: return TS_TYPED;
451 case INTEGER_CST: return TS_INT_CST;
452 case REAL_CST: return TS_REAL_CST;
453 case FIXED_CST: return TS_FIXED_CST;
454 case COMPLEX_CST: return TS_COMPLEX;
455 case VECTOR_CST: return TS_VECTOR;
456 case STRING_CST: return TS_STRING;
457 /* tcc_exceptional cases. */
458 case ERROR_MARK: return TS_COMMON;
459 case IDENTIFIER_NODE: return TS_IDENTIFIER;
460 case TREE_LIST: return TS_LIST;
461 case TREE_VEC: return TS_VEC;
462 case SSA_NAME: return TS_SSA_NAME;
463 case PLACEHOLDER_EXPR: return TS_COMMON;
464 case STATEMENT_LIST: return TS_STATEMENT_LIST;
465 case BLOCK: return TS_BLOCK;
466 case CONSTRUCTOR: return TS_CONSTRUCTOR;
467 case TREE_BINFO: return TS_BINFO;
468 case OMP_CLAUSE: return TS_OMP_CLAUSE;
469 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
470 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
471
472 default:
473 gcc_unreachable ();
474 }
475 }
476
477
478 /* Initialize tree_contains_struct to describe the hierarchy of tree
479 nodes. */
480
481 static void
482 initialize_tree_contains_struct (void)
483 {
484 unsigned i;
485
486 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
487 {
488 enum tree_code code;
489 enum tree_node_structure_enum ts_code;
490
491 code = (enum tree_code) i;
492 ts_code = tree_node_structure_for_code (code);
493
494 /* Mark the TS structure itself. */
495 tree_contains_struct[code][ts_code] = 1;
496
497 /* Mark all the structures that TS is derived from. */
498 switch (ts_code)
499 {
500 case TS_TYPED:
501 case TS_BLOCK:
502 MARK_TS_BASE (code);
503 break;
504
505 case TS_COMMON:
506 case TS_INT_CST:
507 case TS_REAL_CST:
508 case TS_FIXED_CST:
509 case TS_VECTOR:
510 case TS_STRING:
511 case TS_COMPLEX:
512 case TS_SSA_NAME:
513 case TS_CONSTRUCTOR:
514 case TS_EXP:
515 case TS_STATEMENT_LIST:
516 MARK_TS_TYPED (code);
517 break;
518
519 case TS_IDENTIFIER:
520 case TS_DECL_MINIMAL:
521 case TS_TYPE_COMMON:
522 case TS_LIST:
523 case TS_VEC:
524 case TS_BINFO:
525 case TS_OMP_CLAUSE:
526 case TS_OPTIMIZATION:
527 case TS_TARGET_OPTION:
528 MARK_TS_COMMON (code);
529 break;
530
531 case TS_TYPE_WITH_LANG_SPECIFIC:
532 MARK_TS_TYPE_COMMON (code);
533 break;
534
535 case TS_TYPE_NON_COMMON:
536 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
537 break;
538
539 case TS_DECL_COMMON:
540 MARK_TS_DECL_MINIMAL (code);
541 break;
542
543 case TS_DECL_WRTL:
544 case TS_CONST_DECL:
545 MARK_TS_DECL_COMMON (code);
546 break;
547
548 case TS_DECL_NON_COMMON:
549 MARK_TS_DECL_WITH_VIS (code);
550 break;
551
552 case TS_DECL_WITH_VIS:
553 case TS_PARM_DECL:
554 case TS_LABEL_DECL:
555 case TS_RESULT_DECL:
556 MARK_TS_DECL_WRTL (code);
557 break;
558
559 case TS_FIELD_DECL:
560 MARK_TS_DECL_COMMON (code);
561 break;
562
563 case TS_VAR_DECL:
564 MARK_TS_DECL_WITH_VIS (code);
565 break;
566
567 case TS_TYPE_DECL:
568 case TS_FUNCTION_DECL:
569 MARK_TS_DECL_NON_COMMON (code);
570 break;
571
572 case TS_TRANSLATION_UNIT_DECL:
573 MARK_TS_DECL_COMMON (code);
574 break;
575
576 default:
577 gcc_unreachable ();
578 }
579 }
580
581 /* Basic consistency checks for attributes used in fold. */
582 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
583 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
584 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
585 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
586 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
587 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
588 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
589 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
590 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
591 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
592 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
594 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
595 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
596 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
597 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
598 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
599 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
600 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
601 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
602 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
603 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
604 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
605 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
606 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
608 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
609 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
610 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
611 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
612 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
613 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
614 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
615 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
616 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
617 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
618 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
619 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
620 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
621 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
622 }
623
624
625 /* Init tree.c. */
626
627 void
628 init_ttree (void)
629 {
630 /* Initialize the hash table of types. */
631 type_hash_table
632 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
633
634 debug_expr_for_decl
635 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
636
637 value_expr_for_decl
638 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
639
640 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
641
642 int_cst_node = make_int_cst (1, 1);
643
644 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
645
646 cl_optimization_node = make_node (OPTIMIZATION_NODE);
647 cl_target_option_node = make_node (TARGET_OPTION_NODE);
648
649 /* Initialize the tree_contains_struct array. */
650 initialize_tree_contains_struct ();
651 lang_hooks.init_ts ();
652 }
653
654 \f
655 /* The name of the object as the assembler will see it (but before any
656 translations made by ASM_OUTPUT_LABELREF). Often this is the same
657 as DECL_NAME. It is an IDENTIFIER_NODE. */
658 tree
659 decl_assembler_name (tree decl)
660 {
661 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
662 lang_hooks.set_decl_assembler_name (decl);
663 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
664 }
665
666 /* When the target supports COMDAT groups, this indicates which group the
667 DECL is associated with. This can be either an IDENTIFIER_NODE or a
668 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
669 tree
670 decl_comdat_group (const_tree node)
671 {
672 struct symtab_node *snode = symtab_node::get (node);
673 if (!snode)
674 return NULL;
675 return snode->get_comdat_group ();
676 }
677
678 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
679 tree
680 decl_comdat_group_id (const_tree node)
681 {
682 struct symtab_node *snode = symtab_node::get (node);
683 if (!snode)
684 return NULL;
685 return snode->get_comdat_group_id ();
686 }
687
688 /* When the target supports named sections, return the name of the section
689 NODE is placed in as a string, or NULL if it is in no section. */
690 const char *
691 decl_section_name (const_tree node)
692 {
693 struct symtab_node *snode = symtab_node::get (node);
694 if (!snode)
695 return NULL;
696 return snode->get_section ();
697 }
698
699 /* Set the section name of NODE to the string VALUE, or clear its section
700 when VALUE is NULL. */
701 void
702 set_decl_section_name (tree node, const char *value)
703 {
704 struct symtab_node *snode;
705
706 if (value == NULL)
707 {
708 snode = symtab_node::get (node);
709 if (!snode)
710 return;
711 }
712 else if (TREE_CODE (node) == VAR_DECL)
713 snode = varpool_node::get_create (node);
714 else
715 snode = cgraph_node::get_create (node);
716 snode->set_section (value);
717 }
718
719 /* Return TLS model of a variable NODE. */
720 enum tls_model
721 decl_tls_model (const_tree node)
722 {
723 struct varpool_node *snode = varpool_node::get (node);
724 if (!snode)
725 return TLS_MODEL_NONE;
726 return snode->tls_model;
727 }
728
729 /* Set TLS model of variable NODE to MODEL. */
730 void
731 set_decl_tls_model (tree node, enum tls_model model)
732 {
733 struct varpool_node *vnode;
734
735 if (model == TLS_MODEL_NONE)
736 {
737 vnode = varpool_node::get (node);
738 if (!vnode)
739 return;
740 }
741 else
742 vnode = varpool_node::get_create (node);
743 vnode->tls_model = model;
744 }
745
746 /* Compute the number of bytes occupied by a tree with code CODE.
747 This function cannot be used for nodes that have variable sizes,
748 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
749 size_t
750 tree_code_size (enum tree_code code)
751 {
752 switch (TREE_CODE_CLASS (code))
753 {
754 case tcc_declaration: /* A decl node */
755 {
756 switch (code)
757 {
758 case FIELD_DECL:
759 return sizeof (struct tree_field_decl);
760 case PARM_DECL:
761 return sizeof (struct tree_parm_decl);
762 case VAR_DECL:
763 return sizeof (struct tree_var_decl);
764 case LABEL_DECL:
765 return sizeof (struct tree_label_decl);
766 case RESULT_DECL:
767 return sizeof (struct tree_result_decl);
768 case CONST_DECL:
769 return sizeof (struct tree_const_decl);
770 case TYPE_DECL:
771 return sizeof (struct tree_type_decl);
772 case FUNCTION_DECL:
773 return sizeof (struct tree_function_decl);
774 case DEBUG_EXPR_DECL:
775 return sizeof (struct tree_decl_with_rtl);
776 case TRANSLATION_UNIT_DECL:
777 return sizeof (struct tree_translation_unit_decl);
778 case NAMESPACE_DECL:
779 case IMPORTED_DECL:
780 case NAMELIST_DECL:
781 return sizeof (struct tree_decl_non_common);
782 default:
783 return lang_hooks.tree_size (code);
784 }
785 }
786
787 case tcc_type: /* a type node */
788 return sizeof (struct tree_type_non_common);
789
790 case tcc_reference: /* a reference */
791 case tcc_expression: /* an expression */
792 case tcc_statement: /* an expression with side effects */
793 case tcc_comparison: /* a comparison expression */
794 case tcc_unary: /* a unary arithmetic expression */
795 case tcc_binary: /* a binary arithmetic expression */
796 return (sizeof (struct tree_exp)
797 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
798
799 case tcc_constant: /* a constant */
800 switch (code)
801 {
802 case VOID_CST: return sizeof (struct tree_typed);
803 case INTEGER_CST: gcc_unreachable ();
804 case REAL_CST: return sizeof (struct tree_real_cst);
805 case FIXED_CST: return sizeof (struct tree_fixed_cst);
806 case COMPLEX_CST: return sizeof (struct tree_complex);
807 case VECTOR_CST: return sizeof (struct tree_vector);
808 case STRING_CST: gcc_unreachable ();
809 default:
810 return lang_hooks.tree_size (code);
811 }
812
813 case tcc_exceptional: /* something random, like an identifier. */
814 switch (code)
815 {
816 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
817 case TREE_LIST: return sizeof (struct tree_list);
818
819 case ERROR_MARK:
820 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
821
822 case TREE_VEC:
823 case OMP_CLAUSE: gcc_unreachable ();
824
825 case SSA_NAME: return sizeof (struct tree_ssa_name);
826
827 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
828 case BLOCK: return sizeof (struct tree_block);
829 case CONSTRUCTOR: return sizeof (struct tree_constructor);
830 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
831 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
832
833 default:
834 return lang_hooks.tree_size (code);
835 }
836
837 default:
838 gcc_unreachable ();
839 }
840 }
841
842 /* Compute the number of bytes occupied by NODE. This routine only
843 looks at TREE_CODE, except for those nodes that have variable sizes. */
844 size_t
845 tree_size (const_tree node)
846 {
847 const enum tree_code code = TREE_CODE (node);
848 switch (code)
849 {
850 case INTEGER_CST:
851 return (sizeof (struct tree_int_cst)
852 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
853
854 case TREE_BINFO:
855 return (offsetof (struct tree_binfo, base_binfos)
856 + vec<tree, va_gc>
857 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
858
859 case TREE_VEC:
860 return (sizeof (struct tree_vec)
861 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
862
863 case VECTOR_CST:
864 return (sizeof (struct tree_vector)
865 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
866
867 case STRING_CST:
868 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
869
870 case OMP_CLAUSE:
871 return (sizeof (struct tree_omp_clause)
872 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
873 * sizeof (tree));
874
875 default:
876 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
877 return (sizeof (struct tree_exp)
878 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
879 else
880 return tree_code_size (code);
881 }
882 }
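/* Illustrative, disabled sketch: how tree_code_size and tree_size differ for
   fixed- and variable-sized nodes.  The example function name is
   hypothetical.  */
#if 0
static void
example_tree_sizes (void)
{
  /* Fixed-size nodes can be sized from the code alone.  */
  size_t real_cst_size = tree_code_size (REAL_CST);
  gcc_assert (real_cst_size == sizeof (struct tree_real_cst));

  /* TREE_VEC is variable-sized, so tree_size must look at the node:
     sizeof (struct tree_vec) plus one tree pointer per extra element.  */
  tree v = make_tree_vec (3);
  gcc_assert (tree_size (v) == sizeof (struct tree_vec) + 2 * sizeof (tree));
}
#endif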
883
884 /* Record interesting allocation statistics for a tree node with CODE
885 and LENGTH. */
886
887 static void
888 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
889 size_t length ATTRIBUTE_UNUSED)
890 {
891 enum tree_code_class type = TREE_CODE_CLASS (code);
892 tree_node_kind kind;
893
894 if (!GATHER_STATISTICS)
895 return;
896
897 switch (type)
898 {
899 case tcc_declaration: /* A decl node */
900 kind = d_kind;
901 break;
902
903 case tcc_type: /* a type node */
904 kind = t_kind;
905 break;
906
907 case tcc_statement: /* an expression with side effects */
908 kind = s_kind;
909 break;
910
911 case tcc_reference: /* a reference */
912 kind = r_kind;
913 break;
914
915 case tcc_expression: /* an expression */
916 case tcc_comparison: /* a comparison expression */
917 case tcc_unary: /* a unary arithmetic expression */
918 case tcc_binary: /* a binary arithmetic expression */
919 kind = e_kind;
920 break;
921
922 case tcc_constant: /* a constant */
923 kind = c_kind;
924 break;
925
926 case tcc_exceptional: /* something random, like an identifier. */
927 switch (code)
928 {
929 case IDENTIFIER_NODE:
930 kind = id_kind;
931 break;
932
933 case TREE_VEC:
934 kind = vec_kind;
935 break;
936
937 case TREE_BINFO:
938 kind = binfo_kind;
939 break;
940
941 case SSA_NAME:
942 kind = ssa_name_kind;
943 break;
944
945 case BLOCK:
946 kind = b_kind;
947 break;
948
949 case CONSTRUCTOR:
950 kind = constr_kind;
951 break;
952
953 case OMP_CLAUSE:
954 kind = omp_clause_kind;
955 break;
956
957 default:
958 kind = x_kind;
959 break;
960 }
961 break;
962
963 case tcc_vl_exp:
964 kind = e_kind;
965 break;
966
967 default:
968 gcc_unreachable ();
969 }
970
971 tree_code_counts[(int) code]++;
972 tree_node_counts[(int) kind]++;
973 tree_node_sizes[(int) kind] += length;
974 }
975
976 /* Allocate and return a new UID from the DECL_UID namespace. */
977
978 int
979 allocate_decl_uid (void)
980 {
981 return next_decl_uid++;
982 }
983
984 /* Return a newly allocated node of code CODE. For decl and type
985 nodes, some other fields are initialized. The rest of the node is
986 initialized to zero. This function cannot be used for TREE_VEC,
987 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
988 tree_code_size.
989
990 Achoo! I got a code in the node. */
991
992 tree
993 make_node_stat (enum tree_code code MEM_STAT_DECL)
994 {
995 tree t;
996 enum tree_code_class type = TREE_CODE_CLASS (code);
997 size_t length = tree_code_size (code);
998
999 record_node_allocation_statistics (code, length);
1000
1001 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1002 TREE_SET_CODE (t, code);
1003
1004 switch (type)
1005 {
1006 case tcc_statement:
1007 TREE_SIDE_EFFECTS (t) = 1;
1008 break;
1009
1010 case tcc_declaration:
1011 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1012 {
1013 if (code == FUNCTION_DECL)
1014 {
1015 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1016 DECL_MODE (t) = FUNCTION_MODE;
1017 }
1018 else
1019 DECL_ALIGN (t) = 1;
1020 }
1021 DECL_SOURCE_LOCATION (t) = input_location;
1022 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1023 DECL_UID (t) = --next_debug_decl_uid;
1024 else
1025 {
1026 DECL_UID (t) = allocate_decl_uid ();
1027 SET_DECL_PT_UID (t, -1);
1028 }
1029 if (TREE_CODE (t) == LABEL_DECL)
1030 LABEL_DECL_UID (t) = -1;
1031
1032 break;
1033
1034 case tcc_type:
1035 TYPE_UID (t) = next_type_uid++;
1036 TYPE_ALIGN (t) = BITS_PER_UNIT;
1037 TYPE_USER_ALIGN (t) = 0;
1038 TYPE_MAIN_VARIANT (t) = t;
1039 TYPE_CANONICAL (t) = t;
1040
1041 /* Default to no attributes for type, but let target change that. */
1042 TYPE_ATTRIBUTES (t) = NULL_TREE;
1043 targetm.set_default_type_attributes (t);
1044
1045 /* We have not yet computed the alias set for this type. */
1046 TYPE_ALIAS_SET (t) = -1;
1047 break;
1048
1049 case tcc_constant:
1050 TREE_CONSTANT (t) = 1;
1051 break;
1052
1053 case tcc_expression:
1054 switch (code)
1055 {
1056 case INIT_EXPR:
1057 case MODIFY_EXPR:
1058 case VA_ARG_EXPR:
1059 case PREDECREMENT_EXPR:
1060 case PREINCREMENT_EXPR:
1061 case POSTDECREMENT_EXPR:
1062 case POSTINCREMENT_EXPR:
1063 /* All of these have side-effects, no matter what their
1064 operands are. */
1065 TREE_SIDE_EFFECTS (t) = 1;
1066 break;
1067
1068 default:
1069 break;
1070 }
1071 break;
1072
1073 default:
1074 /* Other classes need no special treatment. */
1075 break;
1076 }
1077
1078 return t;
1079 }
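/* Illustrative, disabled sketch: minimal uses of make_node.  The example
   function name is hypothetical.  */
#if 0
static void
example_make_node (void)
{
  /* A BLOCK node: everything beyond the code is zero-initialized.  */
  tree block = make_node (BLOCK);
  gcc_assert (TREE_CODE (block) == BLOCK);

  /* Variable-sized codes (TREE_VEC, INTEGER_CST, STRING_CST, OMP_CLAUSE)
     must use their dedicated constructors instead, e.g. make_tree_vec.  */
  tree vec = make_tree_vec (4);
  gcc_assert (TREE_VEC_LENGTH (vec) == 4);
}
#endif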
1080 \f
1081 /* Return a new node with the same contents as NODE except that its
1082 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1083
1084 tree
1085 copy_node_stat (tree node MEM_STAT_DECL)
1086 {
1087 tree t;
1088 enum tree_code code = TREE_CODE (node);
1089 size_t length;
1090
1091 gcc_assert (code != STATEMENT_LIST);
1092
1093 length = tree_size (node);
1094 record_node_allocation_statistics (code, length);
1095 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1096 memcpy (t, node, length);
1097
1098 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1099 TREE_CHAIN (t) = 0;
1100 TREE_ASM_WRITTEN (t) = 0;
1101 TREE_VISITED (t) = 0;
1102
1103 if (TREE_CODE_CLASS (code) == tcc_declaration)
1104 {
1105 if (code == DEBUG_EXPR_DECL)
1106 DECL_UID (t) = --next_debug_decl_uid;
1107 else
1108 {
1109 DECL_UID (t) = allocate_decl_uid ();
1110 if (DECL_PT_UID_SET_P (node))
1111 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1112 }
1113 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1114 && DECL_HAS_VALUE_EXPR_P (node))
1115 {
1116 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1117 DECL_HAS_VALUE_EXPR_P (t) = 1;
1118 }
1119 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1120 if (TREE_CODE (node) == VAR_DECL)
1121 {
1122 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1123 t->decl_with_vis.symtab_node = NULL;
1124 }
1125 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1126 {
1127 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1128 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1129 }
1130 if (TREE_CODE (node) == FUNCTION_DECL)
1131 {
1132 DECL_STRUCT_FUNCTION (t) = NULL;
1133 t->decl_with_vis.symtab_node = NULL;
1134 }
1135 }
1136 else if (TREE_CODE_CLASS (code) == tcc_type)
1137 {
1138 TYPE_UID (t) = next_type_uid++;
1139 /* The following is so that the debug code for
1140 the copy is different from the original type.
1141 The two statements usually duplicate each other
1142 (because they clear fields of the same union),
1143 but the optimizer should catch that. */
1144 TYPE_SYMTAB_POINTER (t) = 0;
1145 TYPE_SYMTAB_ADDRESS (t) = 0;
1146
1147 /* Do not copy the values cache. */
1148 if (TYPE_CACHED_VALUES_P (t))
1149 {
1150 TYPE_CACHED_VALUES_P (t) = 0;
1151 TYPE_CACHED_VALUES (t) = NULL_TREE;
1152 }
1153 }
1154
1155 return t;
1156 }
1157
1158 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1159 For example, this can copy a list made of TREE_LIST nodes. */
1160
1161 tree
1162 copy_list (tree list)
1163 {
1164 tree head;
1165 tree prev, next;
1166
1167 if (list == 0)
1168 return 0;
1169
1170 head = prev = copy_node (list);
1171 next = TREE_CHAIN (list);
1172 while (next)
1173 {
1174 TREE_CHAIN (prev) = copy_node (next);
1175 prev = TREE_CHAIN (prev);
1176 next = TREE_CHAIN (next);
1177 }
1178 return head;
1179 }
1180
1181 \f
1182 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1183 INTEGER_CST with value CST and type TYPE. */
1184
1185 static unsigned int
1186 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1187 {
1188 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1189 /* We need an extra zero HWI if CST is an unsigned integer with its
1190 upper bit set, and if CST occupies a whole number of HWIs. */
1191 if (TYPE_UNSIGNED (type)
1192 && wi::neg_p (cst)
1193 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1194 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1195 return cst.get_len ();
1196 }
1197
1198 /* Return a new INTEGER_CST with value CST and type TYPE. */
1199
1200 static tree
1201 build_new_int_cst (tree type, const wide_int &cst)
1202 {
1203 unsigned int len = cst.get_len ();
1204 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1205 tree nt = make_int_cst (len, ext_len);
1206
1207 if (len < ext_len)
1208 {
1209 --ext_len;
1210 TREE_INT_CST_ELT (nt, ext_len) = 0;
1211 for (unsigned int i = len; i < ext_len; ++i)
1212 TREE_INT_CST_ELT (nt, i) = -1;
1213 }
1214 else if (TYPE_UNSIGNED (type)
1215 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1216 {
1217 len--;
1218 TREE_INT_CST_ELT (nt, len)
1219 = zext_hwi (cst.elt (len),
1220 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1221 }
1222
1223 for (unsigned int i = 0; i < len; i++)
1224 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1225 TREE_TYPE (nt) = type;
1226 return nt;
1227 }
1228
1229 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1230
1231 tree
1232 build_int_cst (tree type, HOST_WIDE_INT low)
1233 {
1234 /* Support legacy code. */
1235 if (!type)
1236 type = integer_type_node;
1237
1238 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1239 }
1240
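/* Create an INT_CST node with the unsigned value CST zero extended to TYPE. */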
1241 tree
1242 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1243 {
1244 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1245 }
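/* Illustrative, disabled sketch: building small integer constants.  The
   example function name is hypothetical; integer_type_node and
   unsigned_type_node are assumed to have been set up by the front end.  */
#if 0
static void
example_build_int_cst (void)
{
  /* 42 as a signed int constant.  Small values are shared through the
     per-type cache, so repeated calls return the same node.  */
  tree t1 = build_int_cst (integer_type_node, 42);
  gcc_assert (t1 == build_int_cst (integer_type_node, 42));

  /* The same value as an unsigned constant, zero extended to the type.  */
  tree t2 = build_int_cstu (unsigned_type_node, 42);
  gcc_assert (tree_to_uhwi (t2) == 42);
}
#endif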
1246
1247 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1248
1249 tree
1250 build_int_cst_type (tree type, HOST_WIDE_INT low)
1251 {
1252 gcc_assert (type);
1253 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1254 }
1255
1256 /* Construct a tree of type TYPE with the value given by CST. The signedness
1257 of CST is assumed to be the same as the signedness of TYPE. */
1258
1259 tree
1260 double_int_to_tree (tree type, double_int cst)
1261 {
1262 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1263 }
1264
1265 /* We force the wide_int CST to the range of the type TYPE by sign or
1266 zero extending it. OVERFLOWABLE indicates if we are interested in
1267 overflow of the value: when >0 we are only interested in signed
1268 overflow, for <0 we are interested in any overflow. OVERFLOWED
1269 indicates whether overflow has already occurred. We force the
1270 result's value to be within the range of its type (by setting to
1271 0 or 1 all the bits outside the type's range). We set
1272 TREE_OVERFLOW if
1273 OVERFLOWED is nonzero,
1274 or OVERFLOWABLE is >0 and signed overflow occurs,
1275 or OVERFLOWABLE is <0 and any overflow occurs.
1276 We return a new tree node for the extended wide_int. The node
1277 is shared if no overflow flags are set. */
1278
1279
1280 tree
1281 force_fit_type (tree type, const wide_int_ref &cst,
1282 int overflowable, bool overflowed)
1283 {
1284 signop sign = TYPE_SIGN (type);
1285
1286 /* If we need to set overflow flags, return a new unshared node. */
1287 if (overflowed || !wi::fits_to_tree_p (cst, type))
1288 {
1289 if (overflowed
1290 || overflowable < 0
1291 || (overflowable > 0 && sign == SIGNED))
1292 {
1293 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1294 tree t = build_new_int_cst (type, tmp);
1295 TREE_OVERFLOW (t) = 1;
1296 return t;
1297 }
1298 }
1299
1300 /* Else build a shared node. */
1301 return wide_int_to_tree (type, cst);
1302 }
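/* Illustrative, disabled sketch: force_fit_type and the overflow flag.  The
   example function name is hypothetical and assumes a target where signed
   char has 8 bits of precision.  */
#if 0
static void
example_force_fit_type (void)
{
  /* 300 does not fit in signed char; with OVERFLOWABLE < 0 any overflow
     produces a fresh node with TREE_OVERFLOW set and the value wrapped.  */
  tree t = force_fit_type (signed_char_type_node,
                           wi::shwi (300, HOST_BITS_PER_WIDE_INT), -1, false);
  gcc_assert (TREE_OVERFLOW (t));

  /* A value that fits is returned as a shared INTEGER_CST.  */
  tree u = force_fit_type (signed_char_type_node,
                           wi::shwi (7, HOST_BITS_PER_WIDE_INT), -1, false);
  gcc_assert (!TREE_OVERFLOW (u) && tree_to_shwi (u) == 7);
}
#endif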
1303
1304 /* These are the hash table functions for the hash table of shared
1305 INTEGER_CST nodes. */
1306
1307 /* Return the hash code of X, an INTEGER_CST. */
1308
1309 hashval_t
1310 int_cst_hasher::hash (tree x)
1311 {
1312 const_tree const t = x;
1313 hashval_t code = TYPE_UID (TREE_TYPE (t));
1314 int i;
1315
1316 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1317 code ^= TREE_INT_CST_ELT (t, i);
1318
1319 return code;
1320 }
1321
1322 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1323 is the same as that given by Y, which is also an INTEGER_CST tree node. */
1324
1325 bool
1326 int_cst_hasher::equal (tree x, tree y)
1327 {
1328 const_tree const xt = x;
1329 const_tree const yt = y;
1330
1331 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1332 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1333 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1334 return false;
1335
1336 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1337 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1338 return false;
1339
1340 return true;
1341 }
1342
1343 /* Create an INT_CST node of TYPE and value CST.
1344 The returned node is always shared. For small integers we use a
1345 per-type vector cache, for larger ones we use a single hash table.
1346 The value is extended from its precision according to the sign of
1347 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1348 the upper bits and ensures that hashing and value equality based
1349 upon the underlying HOST_WIDE_INTs works without masking. */
1350
1351 tree
1352 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1353 {
1354 tree t;
1355 int ix = -1;
1356 int limit = 0;
1357
1358 gcc_assert (type);
1359 unsigned int prec = TYPE_PRECISION (type);
1360 signop sgn = TYPE_SIGN (type);
1361
1362 /* Verify that everything is canonical. */
1363 int l = pcst.get_len ();
1364 if (l > 1)
1365 {
1366 if (pcst.elt (l - 1) == 0)
1367 gcc_checking_assert (pcst.elt (l - 2) < 0);
1368 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1369 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1370 }
1371
1372 wide_int cst = wide_int::from (pcst, prec, sgn);
1373 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1374
1375 if (ext_len == 1)
1376 {
1377 /* We just need to store a single HOST_WIDE_INT. */
1378 HOST_WIDE_INT hwi;
1379 if (TYPE_UNSIGNED (type))
1380 hwi = cst.to_uhwi ();
1381 else
1382 hwi = cst.to_shwi ();
1383
1384 switch (TREE_CODE (type))
1385 {
1386 case NULLPTR_TYPE:
1387 gcc_assert (hwi == 0);
1388 /* Fallthru. */
1389
1390 case POINTER_TYPE:
1391 case REFERENCE_TYPE:
1392 case POINTER_BOUNDS_TYPE:
1393 /* Cache NULL pointer and zero bounds. */
1394 if (hwi == 0)
1395 {
1396 limit = 1;
1397 ix = 0;
1398 }
1399 break;
1400
1401 case BOOLEAN_TYPE:
1402 /* Cache false or true. */
1403 limit = 2;
1404 if (hwi < 2)
1405 ix = hwi;
1406 break;
1407
1408 case INTEGER_TYPE:
1409 case OFFSET_TYPE:
1410 if (TYPE_SIGN (type) == UNSIGNED)
1411 {
1412 /* Cache [0, N). */
1413 limit = INTEGER_SHARE_LIMIT;
1414 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1415 ix = hwi;
1416 }
1417 else
1418 {
1419 /* Cache [-1, N). */
1420 limit = INTEGER_SHARE_LIMIT + 1;
1421 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1422 ix = hwi + 1;
1423 }
1424 break;
1425
1426 case ENUMERAL_TYPE:
1427 break;
1428
1429 default:
1430 gcc_unreachable ();
1431 }
1432
1433 if (ix >= 0)
1434 {
1435 /* Look for it in the type's vector of small shared ints. */
1436 if (!TYPE_CACHED_VALUES_P (type))
1437 {
1438 TYPE_CACHED_VALUES_P (type) = 1;
1439 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1440 }
1441
1442 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1443 if (t)
1444 /* Make sure no one is clobbering the shared constant. */
1445 gcc_checking_assert (TREE_TYPE (t) == type
1446 && TREE_INT_CST_NUNITS (t) == 1
1447 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1448 && TREE_INT_CST_EXT_NUNITS (t) == 1
1449 && TREE_INT_CST_ELT (t, 0) == hwi);
1450 else
1451 {
1452 /* Create a new shared int. */
1453 t = build_new_int_cst (type, cst);
1454 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1455 }
1456 }
1457 else
1458 {
1459 /* Use the cache of larger shared ints, using int_cst_node as
1460 a temporary. */
1461
1462 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1463 TREE_TYPE (int_cst_node) = type;
1464
1465 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1466 t = *slot;
1467 if (!t)
1468 {
1469 /* Insert this one into the hash table. */
1470 t = int_cst_node;
1471 *slot = t;
1472 /* Make a new node for next time round. */
1473 int_cst_node = make_int_cst (1, 1);
1474 }
1475 }
1476 }
1477 else
1478 {
1479 /* The value either hashes properly or we drop it on the floor
1480 for the gc to take care of. There will not be enough of them
1481 to worry about. */
1482
1483 tree nt = build_new_int_cst (type, cst);
1484 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1485 t = *slot;
1486 if (!t)
1487 {
1488 /* Insert this one into the hash table. */
1489 t = nt;
1490 *slot = t;
1491 }
1492 }
1493
1494 return t;
1495 }
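/* Illustrative, disabled sketch: the sharing behaviour of wide_int_to_tree.
   The example function name is hypothetical.  */
#if 0
static void
example_wide_int_to_tree (void)
{
  unsigned int prec = TYPE_PRECISION (integer_type_node);

  /* Small values hit the per-type vector cache and are shared.  */
  tree a = wide_int_to_tree (integer_type_node, wi::shwi (3, prec));
  tree b = wide_int_to_tree (integer_type_node, wi::shwi (3, prec));
  gcc_assert (a == b);

  /* Larger values go through int_cst_hash_table and are also shared.  */
  tree c = wide_int_to_tree (integer_type_node, wi::shwi (100000, prec));
  tree d = wide_int_to_tree (integer_type_node, wi::shwi (100000, prec));
  gcc_assert (c == d);
}
#endif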
1496
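/* Cache the INTEGER_CST T in the shared-constant caches for its type.
   T must not have TREE_OVERFLOW set.  */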
1497 void
1498 cache_integer_cst (tree t)
1499 {
1500 tree type = TREE_TYPE (t);
1501 int ix = -1;
1502 int limit = 0;
1503 int prec = TYPE_PRECISION (type);
1504
1505 gcc_assert (!TREE_OVERFLOW (t));
1506
1507 switch (TREE_CODE (type))
1508 {
1509 case NULLPTR_TYPE:
1510 gcc_assert (integer_zerop (t));
1511 /* Fallthru. */
1512
1513 case POINTER_TYPE:
1514 case REFERENCE_TYPE:
1515 /* Cache NULL pointer. */
1516 if (integer_zerop (t))
1517 {
1518 limit = 1;
1519 ix = 0;
1520 }
1521 break;
1522
1523 case BOOLEAN_TYPE:
1524 /* Cache false or true. */
1525 limit = 2;
1526 if (wi::ltu_p (t, 2))
1527 ix = TREE_INT_CST_ELT (t, 0);
1528 break;
1529
1530 case INTEGER_TYPE:
1531 case OFFSET_TYPE:
1532 if (TYPE_UNSIGNED (type))
1533 {
1534 /* Cache 0..N */
1535 limit = INTEGER_SHARE_LIMIT;
1536
1537 /* This is a little hokey, but if the prec is smaller than
1538 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1539 obvious test will not get the correct answer. */
1540 if (prec < HOST_BITS_PER_WIDE_INT)
1541 {
1542 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1543 ix = tree_to_uhwi (t);
1544 }
1545 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1546 ix = tree_to_uhwi (t);
1547 }
1548 else
1549 {
1550 /* Cache -1..N */
1551 limit = INTEGER_SHARE_LIMIT + 1;
1552
1553 if (integer_minus_onep (t))
1554 ix = 0;
1555 else if (!wi::neg_p (t))
1556 {
1557 if (prec < HOST_BITS_PER_WIDE_INT)
1558 {
1559 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1560 ix = tree_to_shwi (t) + 1;
1561 }
1562 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1563 ix = tree_to_shwi (t) + 1;
1564 }
1565 }
1566 break;
1567
1568 case ENUMERAL_TYPE:
1569 break;
1570
1571 default:
1572 gcc_unreachable ();
1573 }
1574
1575 if (ix >= 0)
1576 {
1577 /* Look for it in the type's vector of small shared ints. */
1578 if (!TYPE_CACHED_VALUES_P (type))
1579 {
1580 TYPE_CACHED_VALUES_P (type) = 1;
1581 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1582 }
1583
1584 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1585 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1586 }
1587 else
1588 {
1589 /* Use the cache of larger shared ints. */
1590 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1591 /* If there is already an entry for the number verify it's the
1592 same. */
1593 if (*slot)
1594 gcc_assert (wi::eq_p (tree (*slot), t));
1595 else
1596 /* Otherwise insert this one into the hash table. */
1597 *slot = t;
1598 }
1599 }
1600
1601
1602 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1603 and the rest are zeros. */
1604
1605 tree
1606 build_low_bits_mask (tree type, unsigned bits)
1607 {
1608 gcc_assert (bits <= TYPE_PRECISION (type));
1609
1610 return wide_int_to_tree (type, wi::mask (bits, false,
1611 TYPE_PRECISION (type)));
1612 }
1613
1614 /* Check whether X is an integer constant that can be expressed in an
1615 (unsigned) HOST_WIDE_INT without loss of precision. */
1616
1617 bool
1618 cst_and_fits_in_hwi (const_tree x)
1619 {
1620 if (TREE_CODE (x) != INTEGER_CST)
1621 return false;
1622
1623 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1624 return false;
1625
1626 return TREE_INT_CST_NUNITS (x) == 1;
1627 }
1628
1629 /* Build a newly constructed VECTOR_CST node with room for LEN elements. */
1630
1631 tree
1632 make_vector_stat (unsigned len MEM_STAT_DECL)
1633 {
1634 tree t;
1635 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1636
1637 record_node_allocation_statistics (VECTOR_CST, length);
1638
1639 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1640
1641 TREE_SET_CODE (t, VECTOR_CST);
1642 TREE_CONSTANT (t) = 1;
1643
1644 return t;
1645 }
1646
1647 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1648 are given by the array pointed to by VALS. */
1649
1650 tree
1651 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1652 {
1653 int over = 0;
1654 unsigned cnt = 0;
1655 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1656 TREE_TYPE (v) = type;
1657
1658 /* Iterate through elements and check for overflow. */
1659 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1660 {
1661 tree value = vals[cnt];
1662
1663 VECTOR_CST_ELT (v, cnt) = value;
1664
1665 /* Don't crash if we get an address constant. */
1666 if (!CONSTANT_CLASS_P (value))
1667 continue;
1668
1669 over |= TREE_OVERFLOW (value);
1670 }
1671
1672 TREE_OVERFLOW (v) = over;
1673 return v;
1674 }
1675
1676 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1677 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1678
1679 tree
1680 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1681 {
1682 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1683 unsigned HOST_WIDE_INT idx;
1684 tree value;
1685
1686 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1687 vec[idx] = value;
1688 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1689 vec[idx] = build_zero_cst (TREE_TYPE (type));
1690
1691 return build_vector (type, vec);
1692 }
1693
1694 /* Build a vector of type VECTYPE where all the elements are SCs. */
1695 tree
1696 build_vector_from_val (tree vectype, tree sc)
1697 {
1698 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1699
1700 if (sc == error_mark_node)
1701 return sc;
1702
1703 /* Verify that the vector type is suitable for SC. Note that there
1704 is some inconsistency in the type-system with respect to restrict
1705 qualifications of pointers. Vector types always have a main-variant
1706 element type and the qualification is applied to the vector-type.
1707 So TREE_TYPE (vector-type) does not return a properly qualified
1708 vector element-type. */
1709 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1710 TREE_TYPE (vectype)));
1711
1712 if (CONSTANT_CLASS_P (sc))
1713 {
1714 tree *v = XALLOCAVEC (tree, nunits);
1715 for (i = 0; i < nunits; ++i)
1716 v[i] = sc;
1717 return build_vector (vectype, v);
1718 }
1719 else
1720 {
1721 vec<constructor_elt, va_gc> *v;
1722 vec_alloc (v, nunits);
1723 for (i = 0; i < nunits; ++i)
1724 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1725 return build_constructor (vectype, v);
1726 }
1727 }
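/* Illustrative, disabled sketch: splatting a scalar across a vector type with
   build_vector_from_val.  The example function name is hypothetical.  */
#if 0
static void
example_build_vector_from_val (void)
{
  /* A 4-element integer vector type.  */
  tree v4si = build_vector_type (integer_type_node, 4);

  /* A constant scalar yields a VECTOR_CST...  */
  tree ones = build_vector_from_val (v4si, build_one_cst (integer_type_node));
  gcc_assert (TREE_CODE (ones) == VECTOR_CST);

  /* ...while a non-constant scalar would yield a CONSTRUCTOR instead.  */
}
#endif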
1728
1729 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1730 are in the vec pointed to by VALS. */
1731 tree
1732 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1733 {
1734 tree c = make_node (CONSTRUCTOR);
1735 unsigned int i;
1736 constructor_elt *elt;
1737 bool constant_p = true;
1738 bool side_effects_p = false;
1739
1740 TREE_TYPE (c) = type;
1741 CONSTRUCTOR_ELTS (c) = vals;
1742
1743 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1744 {
1745 /* Mostly ctors will have elts that don't have side-effects, so
1746 the usual case is to scan all the elements. Hence a single
1747 loop for both const and side effects, rather than one loop
1748 each (with early outs). */
1749 if (!TREE_CONSTANT (elt->value))
1750 constant_p = false;
1751 if (TREE_SIDE_EFFECTS (elt->value))
1752 side_effects_p = true;
1753 }
1754
1755 TREE_SIDE_EFFECTS (c) = side_effects_p;
1756 TREE_CONSTANT (c) = constant_p;
1757
1758 return c;
1759 }
1760
1761 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1762 INDEX and VALUE. */
1763 tree
1764 build_constructor_single (tree type, tree index, tree value)
1765 {
1766 vec<constructor_elt, va_gc> *v;
1767 constructor_elt elt = {index, value};
1768
1769 vec_alloc (v, 1);
1770 v->quick_push (elt);
1771
1772 return build_constructor (type, v);
1773 }
1774
1775
1776 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1777 are in a list pointed to by VALS. */
1778 tree
1779 build_constructor_from_list (tree type, tree vals)
1780 {
1781 tree t;
1782 vec<constructor_elt, va_gc> *v = NULL;
1783
1784 if (vals)
1785 {
1786 vec_alloc (v, list_length (vals));
1787 for (t = vals; t; t = TREE_CHAIN (t))
1788 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1789 }
1790
1791 return build_constructor (type, v);
1792 }
1793
1794 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1795 of elements, provided as index/value pairs. */
1796
1797 tree
1798 build_constructor_va (tree type, int nelts, ...)
1799 {
1800 vec<constructor_elt, va_gc> *v = NULL;
1801 va_list p;
1802
1803 va_start (p, nelts);
1804 vec_alloc (v, nelts);
1805 while (nelts--)
1806 {
1807 tree index = va_arg (p, tree);
1808 tree value = va_arg (p, tree);
1809 CONSTRUCTOR_APPEND_ELT (v, index, value);
1810 }
1811 va_end (p);
1812 return build_constructor (type, v);
1813 }
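/* Illustrative, disabled sketch: building a small array initializer with
   build_constructor_va.  The example function name is hypothetical.  */
#if 0
static void
example_build_constructor_va (void)
{
  /* int[2] type: element type int, index range [0, 1].  */
  tree itype = build_index_type (size_int (1));
  tree atype = build_array_type (integer_type_node, itype);

  /* { [0] = 1, [1] = 2 } as a CONSTRUCTOR of two index/value pairs.  */
  tree ctor = build_constructor_va (atype, 2,
                                    size_int (0),
                                    build_int_cst (integer_type_node, 1),
                                    size_int (1),
                                    build_int_cst (integer_type_node, 2));
  gcc_assert (TREE_CONSTANT (ctor));
}
#endif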
1814
1815 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1816
1817 tree
1818 build_fixed (tree type, FIXED_VALUE_TYPE f)
1819 {
1820 tree v;
1821 FIXED_VALUE_TYPE *fp;
1822
1823 v = make_node (FIXED_CST);
1824 fp = ggc_alloc<fixed_value> ();
1825 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1826
1827 TREE_TYPE (v) = type;
1828 TREE_FIXED_CST_PTR (v) = fp;
1829 return v;
1830 }
1831
1832 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1833
1834 tree
1835 build_real (tree type, REAL_VALUE_TYPE d)
1836 {
1837 tree v;
1838 REAL_VALUE_TYPE *dp;
1839 int overflow = 0;
1840
1841 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1842 Consider doing it via real_convert now. */
1843
1844 v = make_node (REAL_CST);
1845 dp = ggc_alloc<real_value> ();
1846 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1847
1848 TREE_TYPE (v) = type;
1849 TREE_REAL_CST_PTR (v) = dp;
1850 TREE_OVERFLOW (v) = overflow;
1851 return v;
1852 }
1853
1854 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1855 node I, converted using the mode of TYPE (VOIDmode if TYPE is null). */
1856
1857 REAL_VALUE_TYPE
1858 real_value_from_int_cst (const_tree type, const_tree i)
1859 {
1860 REAL_VALUE_TYPE d;
1861
1862 /* Clear all bits of the real value type so that we can later do
1863 bitwise comparisons to see if two values are the same. */
1864 memset (&d, 0, sizeof d);
1865
1866 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1867 TYPE_SIGN (TREE_TYPE (i)));
1868 return d;
1869 }
1870
1871 /* Given a tree representing an integer constant I, return a tree
1872 representing the same value as a floating-point constant of type TYPE. */
1873
1874 tree
1875 build_real_from_int_cst (tree type, const_tree i)
1876 {
1877 tree v;
1878 int overflow = TREE_OVERFLOW (i);
1879
1880 v = build_real (type, real_value_from_int_cst (type, i));
1881
1882 TREE_OVERFLOW (v) |= overflow;
1883 return v;
1884 }
1885
1886 /* Return a newly constructed STRING_CST node whose value is
1887 the LEN characters at STR.
1888 Note that for a C string literal, LEN should include the trailing NUL.
1889 The TREE_TYPE is not initialized. */
1890
1891 tree
1892 build_string (int len, const char *str)
1893 {
1894 tree s;
1895 size_t length;
1896
1897 /* Do not waste bytes provided by padding of struct tree_string. */
1898 length = len + offsetof (struct tree_string, str) + 1;
1899
1900 record_node_allocation_statistics (STRING_CST, length);
1901
1902 s = (tree) ggc_internal_alloc (length);
1903
1904 memset (s, 0, sizeof (struct tree_typed));
1905 TREE_SET_CODE (s, STRING_CST);
1906 TREE_CONSTANT (s) = 1;
1907 TREE_STRING_LENGTH (s) = len;
1908 memcpy (s->string.str, str, len);
1909 s->string.str[len] = '\0';
1910
1911 return s;
1912 }
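/* Illustrative, disabled sketch: LEN passed to build_string counts the
   trailing NUL of a C string literal.  The example function name is
   hypothetical.  */
#if 0
static void
example_build_string (void)
{
  /* "hi" plus its terminating NUL: LEN is 3.  */
  tree s = build_string (3, "hi");
  gcc_assert (TREE_STRING_LENGTH (s) == 3
              && TREE_STRING_POINTER (s)[2] == '\0');
  /* The caller is responsible for setting TREE_TYPE of the STRING_CST.  */
}
#endif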
1913
1914 /* Return a newly constructed COMPLEX_CST node whose value is
1915 specified by the real and imaginary parts REAL and IMAG.
1916 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1917 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1918
1919 tree
1920 build_complex (tree type, tree real, tree imag)
1921 {
1922 tree t = make_node (COMPLEX_CST);
1923
1924 TREE_REALPART (t) = real;
1925 TREE_IMAGPART (t) = imag;
1926 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1927 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1928 return t;
1929 }
1930
1931 /* Return a constant of arithmetic type TYPE which is the
1932 multiplicative identity of the set TYPE. */
1933
1934 tree
1935 build_one_cst (tree type)
1936 {
1937 switch (TREE_CODE (type))
1938 {
1939 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1941 case OFFSET_TYPE:
1942 return build_int_cst (type, 1);
1943
1944 case REAL_TYPE:
1945 return build_real (type, dconst1);
1946
1947 case FIXED_POINT_TYPE:
1948 /* We can only generate 1 for accum types. */
1949 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1950 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1951
1952 case VECTOR_TYPE:
1953 {
1954 tree scalar = build_one_cst (TREE_TYPE (type));
1955
1956 return build_vector_from_val (type, scalar);
1957 }
1958
1959 case COMPLEX_TYPE:
1960 return build_complex (type,
1961 build_one_cst (TREE_TYPE (type)),
1962 build_zero_cst (TREE_TYPE (type)));
1963
1964 default:
1965 gcc_unreachable ();
1966 }
1967 }
1968
1969 /* Return an integer of type TYPE containing all 1's in as much precision as
1970 it contains, or a complex or vector whose subparts are such integers. */
1971
1972 tree
1973 build_all_ones_cst (tree type)
1974 {
1975 if (TREE_CODE (type) == COMPLEX_TYPE)
1976 {
1977 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1978 return build_complex (type, scalar, scalar);
1979 }
1980 else
1981 return build_minus_one_cst (type);
1982 }
1983
1984 /* Return a constant of arithmetic type TYPE which is the
1985 opposite of the multiplicative identity of the set TYPE. */
1986
1987 tree
1988 build_minus_one_cst (tree type)
1989 {
1990 switch (TREE_CODE (type))
1991 {
1992 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1993 case POINTER_TYPE: case REFERENCE_TYPE:
1994 case OFFSET_TYPE:
1995 return build_int_cst (type, -1);
1996
1997 case REAL_TYPE:
1998 return build_real (type, dconstm1);
1999
2000 case FIXED_POINT_TYPE:
2001 /* We can only generate 1 for accum types. */
2002 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2003 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2004 TYPE_MODE (type)));
2005
2006 case VECTOR_TYPE:
2007 {
2008 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2009
2010 return build_vector_from_val (type, scalar);
2011 }
2012
2013 case COMPLEX_TYPE:
2014 return build_complex (type,
2015 build_minus_one_cst (TREE_TYPE (type)),
2016 build_zero_cst (TREE_TYPE (type)));
2017
2018 default:
2019 gcc_unreachable ();
2020 }
2021 }
2022
2023 /* Build 0 constant of type TYPE. This is used by constructor folding
2024 and thus the constant should be represented in memory by
2025 zero(es). */
2026
2027 tree
2028 build_zero_cst (tree type)
2029 {
2030 switch (TREE_CODE (type))
2031 {
2032 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2033 case POINTER_TYPE: case REFERENCE_TYPE:
2034 case OFFSET_TYPE: case NULLPTR_TYPE:
2035 return build_int_cst (type, 0);
2036
2037 case REAL_TYPE:
2038 return build_real (type, dconst0);
2039
2040 case FIXED_POINT_TYPE:
2041 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2042
2043 case VECTOR_TYPE:
2044 {
2045 tree scalar = build_zero_cst (TREE_TYPE (type));
2046
2047 return build_vector_from_val (type, scalar);
2048 }
2049
2050 case COMPLEX_TYPE:
2051 {
2052 tree zero = build_zero_cst (TREE_TYPE (type));
2053
2054 return build_complex (type, zero, zero);
2055 }
2056
2057 default:
2058 if (!AGGREGATE_TYPE_P (type))
2059 return fold_convert (type, integer_zero_node);
2060 return build_constructor (type, NULL);
2061 }
2062 }
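/* Illustrative sketch (not part of the original source, kept compiled
   out): the identity builders above work uniformly over scalar,
   complex and vector types, so something like "x * 1 + 0" can be
   built generically for any arithmetic TYPE.  The helper name is
   hypothetical.  */
#if 0
static tree
example_mul_one_add_zero (tree type, tree x)
{
  tree one = build_one_cst (type);
  tree zero = build_zero_cst (type);
  tree prod = fold_build2 (MULT_EXPR, type, x, one);
  return fold_build2 (PLUS_EXPR, type, prod, zero);
}
#endif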
2063
2064
2065 /* Build a BINFO with LEN language slots. */
2066
2067 tree
2068 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2069 {
2070 tree t;
2071 size_t length = (offsetof (struct tree_binfo, base_binfos)
2072 + vec<tree, va_gc>::embedded_size (base_binfos));
2073
2074 record_node_allocation_statistics (TREE_BINFO, length);
2075
2076 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2077
2078 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2079
2080 TREE_SET_CODE (t, TREE_BINFO);
2081
2082 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2083
2084 return t;
2085 }
2086
2087 /* Create a CASE_LABEL_EXPR tree node and return it. */
2088
2089 tree
2090 build_case_label (tree low_value, tree high_value, tree label_decl)
2091 {
2092 tree t = make_node (CASE_LABEL_EXPR);
2093
2094 TREE_TYPE (t) = void_type_node;
2095 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2096
2097 CASE_LOW (t) = low_value;
2098 CASE_HIGH (t) = high_value;
2099 CASE_LABEL (t) = label_decl;
2100 CASE_CHAIN (t) = NULL_TREE;
2101
2102 return t;
2103 }
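/* Illustrative sketch (not part of the original source, kept compiled
   out): a GNU range case "case 1 ... 5:" targeting LABEL would be
   built as shown; a plain "case 1:" passes NULL_TREE for HIGH_VALUE,
   and a default label passes NULL_TREE for both bounds.  */
#if 0
static tree
example_case_range (tree label)
{
  tree low = build_int_cst (integer_type_node, 1);
  tree high = build_int_cst (integer_type_node, 5);
  return build_case_label (low, high, label);
}
#endif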
2104
2105 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2106 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2107 The latter determines the length of the HOST_WIDE_INT vector. */
2108
2109 tree
2110 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2111 {
2112 tree t;
2113 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2114 + sizeof (struct tree_int_cst));
2115
2116 gcc_assert (len);
2117 record_node_allocation_statistics (INTEGER_CST, length);
2118
2119 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2120
2121 TREE_SET_CODE (t, INTEGER_CST);
2122 TREE_INT_CST_NUNITS (t) = len;
2123 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2124 /* to_offset can only be applied to trees that are offset_int-sized
2125 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2126 must be exactly the precision of offset_int and so LEN is correct. */
2127 if (ext_len <= OFFSET_INT_ELTS)
2128 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2129 else
2130 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2131
2132 TREE_CONSTANT (t) = 1;
2133
2134 return t;
2135 }
2136
2137 /* Build a newly constructed TREE_VEC node of length LEN. */
2138
2139 tree
2140 make_tree_vec_stat (int len MEM_STAT_DECL)
2141 {
2142 tree t;
2143 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2144
2145 record_node_allocation_statistics (TREE_VEC, length);
2146
2147 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2148
2149 TREE_SET_CODE (t, TREE_VEC);
2150 TREE_VEC_LENGTH (t) = len;
2151
2152 return t;
2153 }
2154
2155 /* Grow a TREE_VEC node to new length LEN. */
2156
2157 tree
2158 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2159 {
2160 gcc_assert (TREE_CODE (v) == TREE_VEC);
2161
2162 int oldlen = TREE_VEC_LENGTH (v);
2163 gcc_assert (len > oldlen);
2164
2165 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2166 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2167
2168 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2169
2170 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2171
2172 TREE_VEC_LENGTH (v) = len;
2173
2174 return v;
2175 }
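/* Illustrative sketch (not part of the original source, kept compiled
   out): make_tree_vec_stat and grow_tree_vec_stat are normally used
   through their statistics-passing wrappers, and elements are
   accessed with TREE_VEC_ELT.  */
#if 0
static tree
example_tree_vec (tree a, tree b, tree c)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = a;
  TREE_VEC_ELT (v, 1) = b;
  v = grow_tree_vec (v, 3);   /* may reallocate, so reuse the result */
  TREE_VEC_ELT (v, 2) = c;
  return v;
}
#endif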
2176 \f
2177 /* Return 1 if EXPR is the integer constant zero or a complex constant
2178 of zero. */
2179
2180 int
2181 integer_zerop (const_tree expr)
2182 {
2183 STRIP_NOPS (expr);
2184
2185 switch (TREE_CODE (expr))
2186 {
2187 case INTEGER_CST:
2188 return wi::eq_p (expr, 0);
2189 case COMPLEX_CST:
2190 return (integer_zerop (TREE_REALPART (expr))
2191 && integer_zerop (TREE_IMAGPART (expr)));
2192 case VECTOR_CST:
2193 {
2194 unsigned i;
2195 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2196 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2197 return false;
2198 return true;
2199 }
2200 default:
2201 return false;
2202 }
2203 }
2204
2205 /* Return 1 if EXPR is the integer constant one or the corresponding
2206 complex constant. */
2207
2208 int
2209 integer_onep (const_tree expr)
2210 {
2211 STRIP_NOPS (expr);
2212
2213 switch (TREE_CODE (expr))
2214 {
2215 case INTEGER_CST:
2216 return wi::eq_p (wi::to_widest (expr), 1);
2217 case COMPLEX_CST:
2218 return (integer_onep (TREE_REALPART (expr))
2219 && integer_zerop (TREE_IMAGPART (expr)));
2220 case VECTOR_CST:
2221 {
2222 unsigned i;
2223 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2224 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2225 return false;
2226 return true;
2227 }
2228 default:
2229 return false;
2230 }
2231 }
2232
2233 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2234 return 1 if every piece is the integer constant one. */
2235
2236 int
2237 integer_each_onep (const_tree expr)
2238 {
2239 STRIP_NOPS (expr);
2240
2241 if (TREE_CODE (expr) == COMPLEX_CST)
2242 return (integer_onep (TREE_REALPART (expr))
2243 && integer_onep (TREE_IMAGPART (expr)));
2244 else
2245 return integer_onep (expr);
2246 }
2247
2248 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2249 it contains, or a complex or vector whose subparts are such integers. */
2250
2251 int
2252 integer_all_onesp (const_tree expr)
2253 {
2254 STRIP_NOPS (expr);
2255
2256 if (TREE_CODE (expr) == COMPLEX_CST
2257 && integer_all_onesp (TREE_REALPART (expr))
2258 && integer_all_onesp (TREE_IMAGPART (expr)))
2259 return 1;
2260
2261 else if (TREE_CODE (expr) == VECTOR_CST)
2262 {
2263 unsigned i;
2264 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2265 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2266 return 0;
2267 return 1;
2268 }
2269
2270 else if (TREE_CODE (expr) != INTEGER_CST)
2271 return 0;
2272
2273 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2274 }
2275
2276 /* Return 1 if EXPR is the integer constant minus one. */
2277
2278 int
2279 integer_minus_onep (const_tree expr)
2280 {
2281 STRIP_NOPS (expr);
2282
2283 if (TREE_CODE (expr) == COMPLEX_CST)
2284 return (integer_all_onesp (TREE_REALPART (expr))
2285 && integer_zerop (TREE_IMAGPART (expr)));
2286 else
2287 return integer_all_onesp (expr);
2288 }
2289
2290 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2291 one bit on). */
2292
2293 int
2294 integer_pow2p (const_tree expr)
2295 {
2296 STRIP_NOPS (expr);
2297
2298 if (TREE_CODE (expr) == COMPLEX_CST
2299 && integer_pow2p (TREE_REALPART (expr))
2300 && integer_zerop (TREE_IMAGPART (expr)))
2301 return 1;
2302
2303 if (TREE_CODE (expr) != INTEGER_CST)
2304 return 0;
2305
2306 return wi::popcount (expr) == 1;
2307 }
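/* Illustrative sketch (not part of the original source, kept compiled
   out): the predicates above look through nops and also accept
   complex and vector constants, so they can be applied directly to
   freshly built constants.  */
#if 0
static void
example_integer_predicates (void)
{
  tree zero = build_zero_cst (integer_type_node);
  tree one = build_one_cst (integer_type_node);
  tree eight = build_int_cst (integer_type_node, 8);

  gcc_assert (integer_zerop (zero));
  gcc_assert (integer_onep (one) && integer_pow2p (one));
  gcc_assert (integer_pow2p (eight) && !integer_onep (eight));
}
#endif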
2308
2309 /* Return 1 if EXPR is an integer constant other than zero or a
2310 complex constant other than zero. */
2311
2312 int
2313 integer_nonzerop (const_tree expr)
2314 {
2315 STRIP_NOPS (expr);
2316
2317 return ((TREE_CODE (expr) == INTEGER_CST
2318 && !wi::eq_p (expr, 0))
2319 || (TREE_CODE (expr) == COMPLEX_CST
2320 && (integer_nonzerop (TREE_REALPART (expr))
2321 || integer_nonzerop (TREE_IMAGPART (expr)))));
2322 }
2323
2324 /* Return 1 if EXPR is the integer constant one. For vector,
2325 return 1 if every piece is the integer constant minus one
2326 (representing the value TRUE). */
2327
2328 int
2329 integer_truep (const_tree expr)
2330 {
2331 STRIP_NOPS (expr);
2332
2333 if (TREE_CODE (expr) == VECTOR_CST)
2334 return integer_all_onesp (expr);
2335 return integer_onep (expr);
2336 }
2337
2338 /* Return 1 if EXPR is the fixed-point constant zero. */
2339
2340 int
2341 fixed_zerop (const_tree expr)
2342 {
2343 return (TREE_CODE (expr) == FIXED_CST
2344 && TREE_FIXED_CST (expr).data.is_zero ());
2345 }
2346
2347 /* Return the power of two represented by a tree node known to be a
2348 power of two. */
2349
2350 int
2351 tree_log2 (const_tree expr)
2352 {
2353 STRIP_NOPS (expr);
2354
2355 if (TREE_CODE (expr) == COMPLEX_CST)
2356 return tree_log2 (TREE_REALPART (expr));
2357
2358 return wi::exact_log2 (expr);
2359 }
2360
2361 /* Similar, but return the largest integer Y such that 2 ** Y is less
2362 than or equal to EXPR. */
2363
2364 int
2365 tree_floor_log2 (const_tree expr)
2366 {
2367 STRIP_NOPS (expr);
2368
2369 if (TREE_CODE (expr) == COMPLEX_CST)
2370 return tree_log2 (TREE_REALPART (expr));
2371
2372 return wi::floor_log2 (expr);
2373 }
2374
2375 /* Return number of known trailing zero bits in EXPR, or, if the value of
2376 EXPR is known to be zero, the precision of its type. */
2377
2378 unsigned int
2379 tree_ctz (const_tree expr)
2380 {
2381 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2382 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2383 return 0;
2384
2385 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2386 switch (TREE_CODE (expr))
2387 {
2388 case INTEGER_CST:
2389 ret1 = wi::ctz (expr);
2390 return MIN (ret1, prec);
2391 case SSA_NAME:
2392 ret1 = wi::ctz (get_nonzero_bits (expr));
2393 return MIN (ret1, prec);
2394 case PLUS_EXPR:
2395 case MINUS_EXPR:
2396 case BIT_IOR_EXPR:
2397 case BIT_XOR_EXPR:
2398 case MIN_EXPR:
2399 case MAX_EXPR:
2400 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2401 if (ret1 == 0)
2402 return ret1;
2403 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2404 return MIN (ret1, ret2);
2405 case POINTER_PLUS_EXPR:
2406 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2407 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2408 /* Second operand is sizetype, which could be in theory
2409 wider than pointer's precision. Make sure we never
2410 return more than prec. */
2411 ret2 = MIN (ret2, prec);
2412 return MIN (ret1, ret2);
2413 case BIT_AND_EXPR:
2414 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2415 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2416 return MAX (ret1, ret2);
2417 case MULT_EXPR:
2418 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2419 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2420 return MIN (ret1 + ret2, prec);
2421 case LSHIFT_EXPR:
2422 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2423 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2424 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2425 {
2426 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2427 return MIN (ret1 + ret2, prec);
2428 }
2429 return ret1;
2430 case RSHIFT_EXPR:
2431 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2432 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2433 {
2434 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2435 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2436 if (ret1 > ret2)
2437 return ret1 - ret2;
2438 }
2439 return 0;
2440 case TRUNC_DIV_EXPR:
2441 case CEIL_DIV_EXPR:
2442 case FLOOR_DIV_EXPR:
2443 case ROUND_DIV_EXPR:
2444 case EXACT_DIV_EXPR:
2445 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2446 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2447 {
2448 int l = tree_log2 (TREE_OPERAND (expr, 1));
2449 if (l >= 0)
2450 {
2451 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2452 ret2 = l;
2453 if (ret1 > ret2)
2454 return ret1 - ret2;
2455 }
2456 }
2457 return 0;
2458 CASE_CONVERT:
2459 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2460 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2461 ret1 = prec;
2462 return MIN (ret1, prec);
2463 case SAVE_EXPR:
2464 return tree_ctz (TREE_OPERAND (expr, 0));
2465 case COND_EXPR:
2466 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2467 if (ret1 == 0)
2468 return 0;
2469 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2470 return MIN (ret1, ret2);
2471 case COMPOUND_EXPR:
2472 return tree_ctz (TREE_OPERAND (expr, 1));
2473 case ADDR_EXPR:
2474 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2475 if (ret1 > BITS_PER_UNIT)
2476 {
2477 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2478 return MIN (ret1, prec);
2479 }
2480 return 0;
2481 default:
2482 return 0;
2483 }
2484 }
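/* Illustrative sketch (not part of the original source, kept compiled
   out): tree_ctz composes over expressions.  For an integral X of
   reasonable precision, "(x * 8) << 2" has at least 5 known trailing
   zero bits whatever the value of X is, because the MULT_EXPR and
   LSHIFT_EXPR cases add the contributions of their operands.  */
#if 0
static unsigned int
example_tree_ctz (tree x)
{
  tree t = fold_build2 (MULT_EXPR, TREE_TYPE (x), x,
			build_int_cst (TREE_TYPE (x), 8));
  t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (t), t,
		   build_int_cst (integer_type_node, 2));
  return tree_ctz (t);   /* >= 5 for any integral X wide enough */
}
#endif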
2485
2486 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2487 decimal float constants, so don't return 1 for them. */
2488
2489 int
2490 real_zerop (const_tree expr)
2491 {
2492 STRIP_NOPS (expr);
2493
2494 switch (TREE_CODE (expr))
2495 {
2496 case REAL_CST:
2497 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2498 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2499 case COMPLEX_CST:
2500 return real_zerop (TREE_REALPART (expr))
2501 && real_zerop (TREE_IMAGPART (expr));
2502 case VECTOR_CST:
2503 {
2504 unsigned i;
2505 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2506 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2507 return false;
2508 return true;
2509 }
2510 default:
2511 return false;
2512 }
2513 }
2514
2515 /* Return 1 if EXPR is the real constant one in real or complex form.
2516 Trailing zeroes matter for decimal float constants, so don't return
2517 1 for them. */
2518
2519 int
2520 real_onep (const_tree expr)
2521 {
2522 STRIP_NOPS (expr);
2523
2524 switch (TREE_CODE (expr))
2525 {
2526 case REAL_CST:
2527 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2528 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2529 case COMPLEX_CST:
2530 return real_onep (TREE_REALPART (expr))
2531 && real_zerop (TREE_IMAGPART (expr));
2532 case VECTOR_CST:
2533 {
2534 unsigned i;
2535 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2536 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2537 return false;
2538 return true;
2539 }
2540 default:
2541 return false;
2542 }
2543 }
2544
2545 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2546 matter for decimal float constants, so don't return 1 for them. */
2547
2548 int
2549 real_minus_onep (const_tree expr)
2550 {
2551 STRIP_NOPS (expr);
2552
2553 switch (TREE_CODE (expr))
2554 {
2555 case REAL_CST:
2556 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2557 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2558 case COMPLEX_CST:
2559 return real_minus_onep (TREE_REALPART (expr))
2560 && real_zerop (TREE_IMAGPART (expr));
2561 case VECTOR_CST:
2562 {
2563 unsigned i;
2564 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2565 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2566 return false;
2567 return true;
2568 }
2569 default:
2570 return false;
2571 }
2572 }
2573
2574 /* Nonzero if EXP is a constant or a cast of a constant. */
2575
2576 int
2577 really_constant_p (const_tree exp)
2578 {
2579 /* This is not quite the same as STRIP_NOPS. It does more. */
2580 while (CONVERT_EXPR_P (exp)
2581 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2582 exp = TREE_OPERAND (exp, 0);
2583 return TREE_CONSTANT (exp);
2584 }
2585 \f
2586 /* Return first list element whose TREE_VALUE is ELEM.
2587 Return 0 if ELEM is not in LIST. */
2588
2589 tree
2590 value_member (tree elem, tree list)
2591 {
2592 while (list)
2593 {
2594 if (elem == TREE_VALUE (list))
2595 return list;
2596 list = TREE_CHAIN (list);
2597 }
2598 return NULL_TREE;
2599 }
2600
2601 /* Return first list element whose TREE_PURPOSE is ELEM.
2602 Return 0 if ELEM is not in LIST. */
2603
2604 tree
2605 purpose_member (const_tree elem, tree list)
2606 {
2607 while (list)
2608 {
2609 if (elem == TREE_PURPOSE (list))
2610 return list;
2611 list = TREE_CHAIN (list);
2612 }
2613 return NULL_TREE;
2614 }
2615
2616 /* Return true if ELEM is in V. */
2617
2618 bool
2619 vec_member (const_tree elem, vec<tree, va_gc> *v)
2620 {
2621 unsigned ix;
2622 tree t;
2623 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2624 if (elem == t)
2625 return true;
2626 return false;
2627 }
2628
2629 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2630 NULL_TREE if the chain does not have that many elements. */
2631
2632 tree
2633 chain_index (int idx, tree chain)
2634 {
2635 for (; chain && idx > 0; --idx)
2636 chain = TREE_CHAIN (chain);
2637 return chain;
2638 }
2639
2640 /* Return nonzero if ELEM is part of the chain CHAIN. */
2641
2642 int
2643 chain_member (const_tree elem, const_tree chain)
2644 {
2645 while (chain)
2646 {
2647 if (elem == chain)
2648 return 1;
2649 chain = DECL_CHAIN (chain);
2650 }
2651
2652 return 0;
2653 }
2654
2655 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2656 We expect a null pointer to mark the end of the chain.
2657 This is the Lisp primitive `length'. */
2658
2659 int
2660 list_length (const_tree t)
2661 {
2662 const_tree p = t;
2663 #ifdef ENABLE_TREE_CHECKING
2664 const_tree q = t;
2665 #endif
2666 int len = 0;
2667
2668 while (p)
2669 {
2670 p = TREE_CHAIN (p);
2671 #ifdef ENABLE_TREE_CHECKING
2672 if (len % 2)
2673 q = TREE_CHAIN (q);
2674 gcc_assert (p != q);
2675 #endif
2676 len++;
2677 }
2678
2679 return len;
2680 }
2681
2682 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2683 UNION_TYPE TYPE, or NULL_TREE if none. */
2684
2685 tree
2686 first_field (const_tree type)
2687 {
2688 tree t = TYPE_FIELDS (type);
2689 while (t && TREE_CODE (t) != FIELD_DECL)
2690 t = TREE_CHAIN (t);
2691 return t;
2692 }
2693
2694 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2695 by modifying the last node in chain 1 to point to chain 2.
2696 This is the Lisp primitive `nconc'. */
2697
2698 tree
2699 chainon (tree op1, tree op2)
2700 {
2701 tree t1;
2702
2703 if (!op1)
2704 return op2;
2705 if (!op2)
2706 return op1;
2707
2708 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2709 continue;
2710 TREE_CHAIN (t1) = op2;
2711
2712 #ifdef ENABLE_TREE_CHECKING
2713 {
2714 tree t2;
2715 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2716 gcc_assert (t2 != t1);
2717 }
2718 #endif
2719
2720 return op1;
2721 }
2722
2723 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2724
2725 tree
2726 tree_last (tree chain)
2727 {
2728 tree next;
2729 if (chain)
2730 while ((next = TREE_CHAIN (chain)))
2731 chain = next;
2732 return chain;
2733 }
2734
2735 /* Reverse the order of elements in the chain T,
2736 and return the new head of the chain (old last element). */
2737
2738 tree
2739 nreverse (tree t)
2740 {
2741 tree prev = 0, decl, next;
2742 for (decl = t; decl; decl = next)
2743 {
2744 /* We shouldn't be using this function to reverse BLOCK chains; we
2745 have blocks_nreverse for that. */
2746 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2747 next = TREE_CHAIN (decl);
2748 TREE_CHAIN (decl) = prev;
2749 prev = decl;
2750 }
2751 return prev;
2752 }
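/* Illustrative sketch (not part of the original source, kept compiled
   out): typical TREE_LIST manipulation with the primitives above --
   lists are usually built front-to-back with tree_cons and then
   reversed once with nreverse.  */
#if 0
static tree
example_list_of (tree a, tree b, tree c)
{
  tree list = NULL_TREE;
  list = tree_cons (NULL_TREE, a, list);
  list = tree_cons (NULL_TREE, b, list);
  list = tree_cons (NULL_TREE, c, list);
  list = nreverse (list);               /* now a, b, c in order */
  gcc_assert (list_length (list) == 3);
  gcc_assert (value_member (b, list) != NULL_TREE);
  return list;
}
#endif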
2753 \f
2754 /* Return a newly created TREE_LIST node whose
2755 purpose and value fields are PARM and VALUE. */
2756
2757 tree
2758 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2759 {
2760 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2761 TREE_PURPOSE (t) = parm;
2762 TREE_VALUE (t) = value;
2763 return t;
2764 }
2765
2766 /* Build a chain of TREE_LIST nodes from a vector. */
2767
2768 tree
2769 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2770 {
2771 tree ret = NULL_TREE;
2772 tree *pp = &ret;
2773 unsigned int i;
2774 tree t;
2775 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2776 {
2777 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2778 pp = &TREE_CHAIN (*pp);
2779 }
2780 return ret;
2781 }
2782
2783 /* Return a newly created TREE_LIST node whose
2784 purpose and value fields are PURPOSE and VALUE
2785 and whose TREE_CHAIN is CHAIN. */
2786
2787 tree
2788 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2789 {
2790 tree node;
2791
2792 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2793 memset (node, 0, sizeof (struct tree_common));
2794
2795 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2796
2797 TREE_SET_CODE (node, TREE_LIST);
2798 TREE_CHAIN (node) = chain;
2799 TREE_PURPOSE (node) = purpose;
2800 TREE_VALUE (node) = value;
2801 return node;
2802 }
2803
2804 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2805 trees. */
2806
2807 vec<tree, va_gc> *
2808 ctor_to_vec (tree ctor)
2809 {
2810 vec<tree, va_gc> *vec;
2811 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2812 unsigned int ix;
2813 tree val;
2814
2815 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2816 vec->quick_push (val);
2817
2818 return vec;
2819 }
2820 \f
2821 /* Return the size nominally occupied by an object of type TYPE
2822 when it resides in memory. The value is measured in units of bytes,
2823 and its data type is that normally used for type sizes
2824 (which is the first type created by make_signed_type or
2825 make_unsigned_type). */
2826
2827 tree
2828 size_in_bytes (const_tree type)
2829 {
2830 tree t;
2831
2832 if (type == error_mark_node)
2833 return integer_zero_node;
2834
2835 type = TYPE_MAIN_VARIANT (type);
2836 t = TYPE_SIZE_UNIT (type);
2837
2838 if (t == 0)
2839 {
2840 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2841 return size_zero_node;
2842 }
2843
2844 return t;
2845 }
2846
2847 /* Return the size of TYPE (in bytes) as a wide integer
2848 or return -1 if the size can vary or is larger than an integer. */
2849
2850 HOST_WIDE_INT
2851 int_size_in_bytes (const_tree type)
2852 {
2853 tree t;
2854
2855 if (type == error_mark_node)
2856 return 0;
2857
2858 type = TYPE_MAIN_VARIANT (type);
2859 t = TYPE_SIZE_UNIT (type);
2860
2861 if (t && tree_fits_uhwi_p (t))
2862 return TREE_INT_CST_LOW (t);
2863 else
2864 return -1;
2865 }
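/* Illustrative sketch (not part of the original source, kept compiled
   out): int_size_in_bytes returns -1 for variable-sized or
   unrepresentable types, so callers typically check the sign before
   using the value.  The helper name is hypothetical.  */
#if 0
static bool
example_fits_in_word (const_tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  return size >= 0 && size <= (HOST_WIDE_INT) UNITS_PER_WORD;
}
#endif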
2866
2867 /* Return the maximum size of TYPE (in bytes) as a wide integer
2868 or return -1 if the size can vary or is larger than an integer. */
2869
2870 HOST_WIDE_INT
2871 max_int_size_in_bytes (const_tree type)
2872 {
2873 HOST_WIDE_INT size = -1;
2874 tree size_tree;
2875
2876 /* If this is an array type, check for a possible MAX_SIZE attached. */
2877
2878 if (TREE_CODE (type) == ARRAY_TYPE)
2879 {
2880 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2881
2882 if (size_tree && tree_fits_uhwi_p (size_tree))
2883 size = tree_to_uhwi (size_tree);
2884 }
2885
2886 /* If we still haven't been able to get a size, see if the language
2887 can compute a maximum size. */
2888
2889 if (size == -1)
2890 {
2891 size_tree = lang_hooks.types.max_size (type);
2892
2893 if (size_tree && tree_fits_uhwi_p (size_tree))
2894 size = tree_to_uhwi (size_tree);
2895 }
2896
2897 return size;
2898 }
2899 \f
2900 /* Return the bit position of FIELD, in bits from the start of the record.
2901 This is a tree of type bitsizetype. */
2902
2903 tree
2904 bit_position (const_tree field)
2905 {
2906 return bit_from_pos (DECL_FIELD_OFFSET (field),
2907 DECL_FIELD_BIT_OFFSET (field));
2908 }
2909 \f
2910 /* Return the byte position of FIELD, in bytes from the start of the record.
2911 This is a tree of type sizetype. */
2912
2913 tree
2914 byte_position (const_tree field)
2915 {
2916 return byte_from_pos (DECL_FIELD_OFFSET (field),
2917 DECL_FIELD_BIT_OFFSET (field));
2918 }
2919
2920 /* Likewise, but return as an integer. It must be representable in
2921 that way (since it could be a signed value, we don't have the
2922 option of returning -1 like int_size_in_bytes can). */
2923
2924 HOST_WIDE_INT
2925 int_byte_position (const_tree field)
2926 {
2927 return tree_to_shwi (byte_position (field));
2928 }
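/* Illustrative sketch (not part of the original source, kept compiled
   out): walking the FIELD_DECLs of a RECORD_TYPE and asking for the
   byte offset of each one with int_byte_position.  */
#if 0
static HOST_WIDE_INT
example_last_field_offset (tree record_type)
{
  HOST_WIDE_INT offset = -1;
  for (tree field = TYPE_FIELDS (record_type); field;
       field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      offset = int_byte_position (field);
  return offset;
}
#endif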
2929 \f
2930 /* Return the strictest alignment, in bits, that T is known to have. */
2931
2932 unsigned int
2933 expr_align (const_tree t)
2934 {
2935 unsigned int align0, align1;
2936
2937 switch (TREE_CODE (t))
2938 {
2939 CASE_CONVERT: case NON_LVALUE_EXPR:
2940 /* If we have conversions, we know that the alignment of the
2941 object must meet each of the alignments of the types. */
2942 align0 = expr_align (TREE_OPERAND (t, 0));
2943 align1 = TYPE_ALIGN (TREE_TYPE (t));
2944 return MAX (align0, align1);
2945
2946 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2947 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2948 case CLEANUP_POINT_EXPR:
2949 /* These don't change the alignment of an object. */
2950 return expr_align (TREE_OPERAND (t, 0));
2951
2952 case COND_EXPR:
2953 /* The best we can do is say that the alignment is the least aligned
2954 of the two arms. */
2955 align0 = expr_align (TREE_OPERAND (t, 1));
2956 align1 = expr_align (TREE_OPERAND (t, 2));
2957 return MIN (align0, align1);
2958
2959 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2960 meaningfully, it's always 1. */
2961 case LABEL_DECL: case CONST_DECL:
2962 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2963 case FUNCTION_DECL:
2964 gcc_assert (DECL_ALIGN (t) != 0);
2965 return DECL_ALIGN (t);
2966
2967 default:
2968 break;
2969 }
2970
2971 /* Otherwise take the alignment from that of the type. */
2972 return TYPE_ALIGN (TREE_TYPE (t));
2973 }
2974 \f
2975 /* Return, as a tree node, the number of elements for TYPE (which is an
2976 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2977
2978 tree
2979 array_type_nelts (const_tree type)
2980 {
2981 tree index_type, min, max;
2982
2983 /* If they did it with unspecified bounds, then we should have already
2984 given an error about it before we got here. */
2985 if (! TYPE_DOMAIN (type))
2986 return error_mark_node;
2987
2988 index_type = TYPE_DOMAIN (type);
2989 min = TYPE_MIN_VALUE (index_type);
2990 max = TYPE_MAX_VALUE (index_type);
2991
2992 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2993 if (!max)
2994 return error_mark_node;
2995
2996 return (integer_zerop (min)
2997 ? max
2998 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2999 }
3000 \f
3001 /* If arg is static -- a reference to an object in static storage -- then
3002 return the object. This is not the same as the C meaning of `static'.
3003 If arg isn't static, return NULL. */
3004
3005 tree
3006 staticp (tree arg)
3007 {
3008 switch (TREE_CODE (arg))
3009 {
3010 case FUNCTION_DECL:
3011 /* Nested functions are static, even though taking their address will
3012 involve a trampoline as we unnest the nested function and create
3013 the trampoline on the tree level. */
3014 return arg;
3015
3016 case VAR_DECL:
3017 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3018 && ! DECL_THREAD_LOCAL_P (arg)
3019 && ! DECL_DLLIMPORT_P (arg)
3020 ? arg : NULL);
3021
3022 case CONST_DECL:
3023 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3024 ? arg : NULL);
3025
3026 case CONSTRUCTOR:
3027 return TREE_STATIC (arg) ? arg : NULL;
3028
3029 case LABEL_DECL:
3030 case STRING_CST:
3031 return arg;
3032
3033 case COMPONENT_REF:
3034 /* If the thing being referenced is not a field, then it is
3035 something language specific. */
3036 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3037
3038 /* If we are referencing a bitfield, we can't evaluate an
3039 ADDR_EXPR at compile time and so it isn't a constant. */
3040 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3041 return NULL;
3042
3043 return staticp (TREE_OPERAND (arg, 0));
3044
3045 case BIT_FIELD_REF:
3046 return NULL;
3047
3048 case INDIRECT_REF:
3049 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3050
3051 case ARRAY_REF:
3052 case ARRAY_RANGE_REF:
3053 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3054 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3055 return staticp (TREE_OPERAND (arg, 0));
3056 else
3057 return NULL;
3058
3059 case COMPOUND_LITERAL_EXPR:
3060 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3061
3062 default:
3063 return NULL;
3064 }
3065 }
3066
3067 \f
3068
3069
3070 /* Return whether OP is a DECL whose address is function-invariant. */
3071
3072 bool
3073 decl_address_invariant_p (const_tree op)
3074 {
3075 /* The conditions below are slightly less strict than those in
3076 staticp. */
3077
3078 switch (TREE_CODE (op))
3079 {
3080 case PARM_DECL:
3081 case RESULT_DECL:
3082 case LABEL_DECL:
3083 case FUNCTION_DECL:
3084 return true;
3085
3086 case VAR_DECL:
3087 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3088 || DECL_THREAD_LOCAL_P (op)
3089 || DECL_CONTEXT (op) == current_function_decl
3090 || decl_function_context (op) == current_function_decl)
3091 return true;
3092 break;
3093
3094 case CONST_DECL:
3095 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3096 || decl_function_context (op) == current_function_decl)
3097 return true;
3098 break;
3099
3100 default:
3101 break;
3102 }
3103
3104 return false;
3105 }
3106
3107 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3108
3109 bool
3110 decl_address_ip_invariant_p (const_tree op)
3111 {
3112 /* The conditions below are slightly less strict than those in
3113 staticp. */
3114
3115 switch (TREE_CODE (op))
3116 {
3117 case LABEL_DECL:
3118 case FUNCTION_DECL:
3119 case STRING_CST:
3120 return true;
3121
3122 case VAR_DECL:
3123 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3124 && !DECL_DLLIMPORT_P (op))
3125 || DECL_THREAD_LOCAL_P (op))
3126 return true;
3127 break;
3128
3129 case CONST_DECL:
3130 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3131 return true;
3132 break;
3133
3134 default:
3135 break;
3136 }
3137
3138 return false;
3139 }
3140
3141
3142 /* Return true if T is function-invariant (internal function, does
3143 not handle arithmetic; that's handled in skip_simple_arithmetic and
3144 tree_invariant_p). */
3145
3146 static bool tree_invariant_p (tree t);
3147
3148 static bool
3149 tree_invariant_p_1 (tree t)
3150 {
3151 tree op;
3152
3153 if (TREE_CONSTANT (t)
3154 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3155 return true;
3156
3157 switch (TREE_CODE (t))
3158 {
3159 case SAVE_EXPR:
3160 return true;
3161
3162 case ADDR_EXPR:
3163 op = TREE_OPERAND (t, 0);
3164 while (handled_component_p (op))
3165 {
3166 switch (TREE_CODE (op))
3167 {
3168 case ARRAY_REF:
3169 case ARRAY_RANGE_REF:
3170 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3171 || TREE_OPERAND (op, 2) != NULL_TREE
3172 || TREE_OPERAND (op, 3) != NULL_TREE)
3173 return false;
3174 break;
3175
3176 case COMPONENT_REF:
3177 if (TREE_OPERAND (op, 2) != NULL_TREE)
3178 return false;
3179 break;
3180
3181 default:;
3182 }
3183 op = TREE_OPERAND (op, 0);
3184 }
3185
3186 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3187
3188 default:
3189 break;
3190 }
3191
3192 return false;
3193 }
3194
3195 /* Return true if T is function-invariant. */
3196
3197 static bool
3198 tree_invariant_p (tree t)
3199 {
3200 tree inner = skip_simple_arithmetic (t);
3201 return tree_invariant_p_1 (inner);
3202 }
3203
3204 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3205 Do this to any expression which may be used in more than one place,
3206 but must be evaluated only once.
3207
3208 Normally, expand_expr would reevaluate the expression each time.
3209 Calling save_expr produces something that is evaluated and recorded
3210 the first time expand_expr is called on it. Subsequent calls to
3211 expand_expr just reuse the recorded value.
3212
3213 The call to expand_expr that generates code that actually computes
3214 the value is the first call *at compile time*. Subsequent calls
3215 *at compile time* generate code to use the saved value.
3216 This produces correct results provided that *at run time* control
3217 always flows through the insns made by the first expand_expr
3218 before reaching the other places where the save_expr was evaluated.
3219 You, the caller of save_expr, must make sure this is so.
3220
3221 Constants, and certain read-only nodes, are returned with no
3222 SAVE_EXPR because that is safe. Expressions containing placeholders
3223 are not touched; see tree.def for an explanation of what these
3224 are used for. */
3225
3226 tree
3227 save_expr (tree expr)
3228 {
3229 tree t = fold (expr);
3230 tree inner;
3231
3232 /* If the tree evaluates to a constant, then we don't want to hide that
3233 fact (i.e. this allows further folding, and direct checks for constants).
3234 However, a read-only object that has side effects cannot be bypassed.
3235 Since it is no problem to reevaluate literals, we just return the
3236 literal node. */
3237 inner = skip_simple_arithmetic (t);
3238 if (TREE_CODE (inner) == ERROR_MARK)
3239 return inner;
3240
3241 if (tree_invariant_p_1 (inner))
3242 return t;
3243
3244 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3245 it means that the size or offset of some field of an object depends on
3246 the value within another field.
3247
3248 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3249 and some variable since it would then need to be both evaluated once and
3250 evaluated more than once. Front-ends must assure this case cannot
3251 happen by surrounding any such subexpressions in their own SAVE_EXPR
3252 and forcing evaluation at the proper time. */
3253 if (contains_placeholder_p (inner))
3254 return t;
3255
3256 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3257 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3258
3259 /* This expression might be placed ahead of a jump to ensure that the
3260 value was computed on both sides of the jump. So make sure it isn't
3261 eliminated as dead. */
3262 TREE_SIDE_EFFECTS (t) = 1;
3263 return t;
3264 }
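/* Illustrative sketch (not part of the original source, kept compiled
   out): the classic use of save_expr is to reference an operand more
   than once while guaranteeing a single evaluation, e.g. when
   expanding an absolute value by hand.  */
#if 0
static tree
example_abs_via_save_expr (tree x)
{
  tree type = TREE_TYPE (x);
  x = save_expr (x);            /* X may have side effects; evaluate once */
  tree neg = fold_build1 (NEGATE_EXPR, type, x);
  tree cond = fold_build2 (LT_EXPR, boolean_type_node, x,
			   build_zero_cst (type));
  return fold_build3 (COND_EXPR, type, cond, neg, x);
}
#endif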
3265
3266 /* Look inside EXPR into any simple arithmetic operations. Return the
3267 outermost non-arithmetic or non-invariant node. */
3268
3269 tree
3270 skip_simple_arithmetic (tree expr)
3271 {
3272 /* We don't care about whether this can be used as an lvalue in this
3273 context. */
3274 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3275 expr = TREE_OPERAND (expr, 0);
3276
3277 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3278 a constant, it will be more efficient to not make another SAVE_EXPR since
3279 it will allow better simplification and GCSE will be able to merge the
3280 computations if they actually occur. */
3281 while (true)
3282 {
3283 if (UNARY_CLASS_P (expr))
3284 expr = TREE_OPERAND (expr, 0);
3285 else if (BINARY_CLASS_P (expr))
3286 {
3287 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3288 expr = TREE_OPERAND (expr, 0);
3289 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3290 expr = TREE_OPERAND (expr, 1);
3291 else
3292 break;
3293 }
3294 else
3295 break;
3296 }
3297
3298 return expr;
3299 }
3300
3301 /* Look inside EXPR into simple arithmetic operations involving constants.
3302 Return the outermost non-arithmetic or non-constant node. */
3303
3304 tree
3305 skip_simple_constant_arithmetic (tree expr)
3306 {
3307 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3308 expr = TREE_OPERAND (expr, 0);
3309
3310 while (true)
3311 {
3312 if (UNARY_CLASS_P (expr))
3313 expr = TREE_OPERAND (expr, 0);
3314 else if (BINARY_CLASS_P (expr))
3315 {
3316 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3317 expr = TREE_OPERAND (expr, 0);
3318 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3319 expr = TREE_OPERAND (expr, 1);
3320 else
3321 break;
3322 }
3323 else
3324 break;
3325 }
3326
3327 return expr;
3328 }
3329
3330 /* Return which tree structure is used by T. */
3331
3332 enum tree_node_structure_enum
3333 tree_node_structure (const_tree t)
3334 {
3335 const enum tree_code code = TREE_CODE (t);
3336 return tree_node_structure_for_code (code);
3337 }
3338
3339 /* Set various status flags when building a CALL_EXPR object T. */
3340
3341 static void
3342 process_call_operands (tree t)
3343 {
3344 bool side_effects = TREE_SIDE_EFFECTS (t);
3345 bool read_only = false;
3346 int i = call_expr_flags (t);
3347
3348 /* Calls have side-effects, except those to const or pure functions. */
3349 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3350 side_effects = true;
3351 /* Propagate TREE_READONLY of arguments for const functions. */
3352 if (i & ECF_CONST)
3353 read_only = true;
3354
3355 if (!side_effects || read_only)
3356 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3357 {
3358 tree op = TREE_OPERAND (t, i);
3359 if (op && TREE_SIDE_EFFECTS (op))
3360 side_effects = true;
3361 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3362 read_only = false;
3363 }
3364
3365 TREE_SIDE_EFFECTS (t) = side_effects;
3366 TREE_READONLY (t) = read_only;
3367 }
3368 \f
3369 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3370 size or offset that depends on a field within a record. */
3371
3372 bool
3373 contains_placeholder_p (const_tree exp)
3374 {
3375 enum tree_code code;
3376
3377 if (!exp)
3378 return 0;
3379
3380 code = TREE_CODE (exp);
3381 if (code == PLACEHOLDER_EXPR)
3382 return 1;
3383
3384 switch (TREE_CODE_CLASS (code))
3385 {
3386 case tcc_reference:
3387 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3388 position computations since they will be converted into a
3389 WITH_RECORD_EXPR involving the reference, which we assume
3390 here to be valid. */
3391 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3392
3393 case tcc_exceptional:
3394 if (code == TREE_LIST)
3395 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3396 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3397 break;
3398
3399 case tcc_unary:
3400 case tcc_binary:
3401 case tcc_comparison:
3402 case tcc_expression:
3403 switch (code)
3404 {
3405 case COMPOUND_EXPR:
3406 /* Ignoring the first operand isn't quite right, but works best. */
3407 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3408
3409 case COND_EXPR:
3410 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3411 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3412 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3413
3414 case SAVE_EXPR:
3415 /* The save_expr function never wraps anything containing
3416 a PLACEHOLDER_EXPR. */
3417 return 0;
3418
3419 default:
3420 break;
3421 }
3422
3423 switch (TREE_CODE_LENGTH (code))
3424 {
3425 case 1:
3426 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3427 case 2:
3428 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3429 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3430 default:
3431 return 0;
3432 }
3433
3434 case tcc_vl_exp:
3435 switch (code)
3436 {
3437 case CALL_EXPR:
3438 {
3439 const_tree arg;
3440 const_call_expr_arg_iterator iter;
3441 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3442 if (CONTAINS_PLACEHOLDER_P (arg))
3443 return 1;
3444 return 0;
3445 }
3446 default:
3447 return 0;
3448 }
3449
3450 default:
3451 return 0;
3452 }
3453 return 0;
3454 }
3455
3456 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3457 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3458 field positions. */
3459
3460 static bool
3461 type_contains_placeholder_1 (const_tree type)
3462 {
3463 /* If the size contains a placeholder or the parent type (component type in
3464 the case of arrays) involves a placeholder, this type does. */
3465 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3466 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3467 || (!POINTER_TYPE_P (type)
3468 && TREE_TYPE (type)
3469 && type_contains_placeholder_p (TREE_TYPE (type))))
3470 return true;
3471
3472 /* Now do type-specific checks. Note that the last part of the check above
3473 greatly limits what we have to do below. */
3474 switch (TREE_CODE (type))
3475 {
3476 case VOID_TYPE:
3477 case POINTER_BOUNDS_TYPE:
3478 case COMPLEX_TYPE:
3479 case ENUMERAL_TYPE:
3480 case BOOLEAN_TYPE:
3481 case POINTER_TYPE:
3482 case OFFSET_TYPE:
3483 case REFERENCE_TYPE:
3484 case METHOD_TYPE:
3485 case FUNCTION_TYPE:
3486 case VECTOR_TYPE:
3487 case NULLPTR_TYPE:
3488 return false;
3489
3490 case INTEGER_TYPE:
3491 case REAL_TYPE:
3492 case FIXED_POINT_TYPE:
3493 /* Here we just check the bounds. */
3494 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3495 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3496
3497 case ARRAY_TYPE:
3498 /* We have already checked the component type above, so just check the
3499 domain type. */
3500 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3501
3502 case RECORD_TYPE:
3503 case UNION_TYPE:
3504 case QUAL_UNION_TYPE:
3505 {
3506 tree field;
3507
3508 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3509 if (TREE_CODE (field) == FIELD_DECL
3510 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3511 || (TREE_CODE (type) == QUAL_UNION_TYPE
3512 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3513 || type_contains_placeholder_p (TREE_TYPE (field))))
3514 return true;
3515
3516 return false;
3517 }
3518
3519 default:
3520 gcc_unreachable ();
3521 }
3522 }
3523
3524 /* Wrapper around above function used to cache its result. */
3525
3526 bool
3527 type_contains_placeholder_p (tree type)
3528 {
3529 bool result;
3530
3531 /* If the contains_placeholder_bits field has been initialized,
3532 then we know the answer. */
3533 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3534 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3535
3536 /* Indicate that we've seen this type node, and the answer is false.
3537 This is what we want to return if we run into recursion via fields. */
3538 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3539
3540 /* Compute the real value. */
3541 result = type_contains_placeholder_1 (type);
3542
3543 /* Store the real value. */
3544 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3545
3546 return result;
3547 }
3548 \f
3549 /* Push tree EXP onto vector QUEUE if it is not already present. */
3550
3551 static void
3552 push_without_duplicates (tree exp, vec<tree> *queue)
3553 {
3554 unsigned int i;
3555 tree iter;
3556
3557 FOR_EACH_VEC_ELT (*queue, i, iter)
3558 if (simple_cst_equal (iter, exp) == 1)
3559 break;
3560
3561 if (!iter)
3562 queue->safe_push (exp);
3563 }
3564
3565 /* Given a tree EXP, find all occurrences of references to fields
3566 in a PLACEHOLDER_EXPR and place them in vector REFS without
3567 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3568 we assume here that EXP contains only arithmetic expressions
3569 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3570 argument list. */
3571
3572 void
3573 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3574 {
3575 enum tree_code code = TREE_CODE (exp);
3576 tree inner;
3577 int i;
3578
3579 /* We handle TREE_LIST and COMPONENT_REF separately. */
3580 if (code == TREE_LIST)
3581 {
3582 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3583 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3584 }
3585 else if (code == COMPONENT_REF)
3586 {
3587 for (inner = TREE_OPERAND (exp, 0);
3588 REFERENCE_CLASS_P (inner);
3589 inner = TREE_OPERAND (inner, 0))
3590 ;
3591
3592 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3593 push_without_duplicates (exp, refs);
3594 else
3595 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3596 }
3597 else
3598 switch (TREE_CODE_CLASS (code))
3599 {
3600 case tcc_constant:
3601 break;
3602
3603 case tcc_declaration:
3604 /* Variables allocated to static storage can stay. */
3605 if (!TREE_STATIC (exp))
3606 push_without_duplicates (exp, refs);
3607 break;
3608
3609 case tcc_expression:
3610 /* This is the pattern built in ada/make_aligning_type. */
3611 if (code == ADDR_EXPR
3612 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3613 {
3614 push_without_duplicates (exp, refs);
3615 break;
3616 }
3617
3618 /* Fall through... */
3619
3620 case tcc_exceptional:
3621 case tcc_unary:
3622 case tcc_binary:
3623 case tcc_comparison:
3624 case tcc_reference:
3625 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3626 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3627 break;
3628
3629 case tcc_vl_exp:
3630 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3631 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3632 break;
3633
3634 default:
3635 gcc_unreachable ();
3636 }
3637 }
3638
3639 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3640 return a tree with all occurrences of references to F in a
3641 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3642 CONST_DECLs. Note that we assume here that EXP contains only
3643 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3644 occurring only in their argument list. */
3645
3646 tree
3647 substitute_in_expr (tree exp, tree f, tree r)
3648 {
3649 enum tree_code code = TREE_CODE (exp);
3650 tree op0, op1, op2, op3;
3651 tree new_tree;
3652
3653 /* We handle TREE_LIST and COMPONENT_REF separately. */
3654 if (code == TREE_LIST)
3655 {
3656 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3657 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3658 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3659 return exp;
3660
3661 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3662 }
3663 else if (code == COMPONENT_REF)
3664 {
3665 tree inner;
3666
3667 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3668 and it is the right field, replace it with R. */
3669 for (inner = TREE_OPERAND (exp, 0);
3670 REFERENCE_CLASS_P (inner);
3671 inner = TREE_OPERAND (inner, 0))
3672 ;
3673
3674 /* The field. */
3675 op1 = TREE_OPERAND (exp, 1);
3676
3677 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3678 return r;
3679
3680 /* If this expression hasn't been completed yet, leave it alone. */
3681 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3682 return exp;
3683
3684 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3685 if (op0 == TREE_OPERAND (exp, 0))
3686 return exp;
3687
3688 new_tree
3689 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3690 }
3691 else
3692 switch (TREE_CODE_CLASS (code))
3693 {
3694 case tcc_constant:
3695 return exp;
3696
3697 case tcc_declaration:
3698 if (exp == f)
3699 return r;
3700 else
3701 return exp;
3702
3703 case tcc_expression:
3704 if (exp == f)
3705 return r;
3706
3707 /* Fall through... */
3708
3709 case tcc_exceptional:
3710 case tcc_unary:
3711 case tcc_binary:
3712 case tcc_comparison:
3713 case tcc_reference:
3714 switch (TREE_CODE_LENGTH (code))
3715 {
3716 case 0:
3717 return exp;
3718
3719 case 1:
3720 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3721 if (op0 == TREE_OPERAND (exp, 0))
3722 return exp;
3723
3724 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3725 break;
3726
3727 case 2:
3728 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3729 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3730
3731 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3732 return exp;
3733
3734 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3735 break;
3736
3737 case 3:
3738 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3739 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3740 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3741
3742 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3743 && op2 == TREE_OPERAND (exp, 2))
3744 return exp;
3745
3746 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3747 break;
3748
3749 case 4:
3750 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3751 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3752 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3753 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3754
3755 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3756 && op2 == TREE_OPERAND (exp, 2)
3757 && op3 == TREE_OPERAND (exp, 3))
3758 return exp;
3759
3760 new_tree
3761 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3762 break;
3763
3764 default:
3765 gcc_unreachable ();
3766 }
3767 break;
3768
3769 case tcc_vl_exp:
3770 {
3771 int i;
3772
3773 new_tree = NULL_TREE;
3774
3775 /* If we are trying to replace F with a constant, inline back
3776 functions which do nothing else than computing a value from
3777 the arguments they are passed. This makes it possible to
3778 fold partially or entirely the replacement expression. */
3779 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3780 {
3781 tree t = maybe_inline_call_in_expr (exp);
3782 if (t)
3783 return SUBSTITUTE_IN_EXPR (t, f, r);
3784 }
3785
3786 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3787 {
3788 tree op = TREE_OPERAND (exp, i);
3789 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3790 if (new_op != op)
3791 {
3792 if (!new_tree)
3793 new_tree = copy_node (exp);
3794 TREE_OPERAND (new_tree, i) = new_op;
3795 }
3796 }
3797
3798 if (new_tree)
3799 {
3800 new_tree = fold (new_tree);
3801 if (TREE_CODE (new_tree) == CALL_EXPR)
3802 process_call_operands (new_tree);
3803 }
3804 else
3805 return exp;
3806 }
3807 break;
3808
3809 default:
3810 gcc_unreachable ();
3811 }
3812
3813 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3814
3815 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3816 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3817
3818 return new_tree;
3819 }
3820
3821 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3822 for it within OBJ, a tree that is an object or a chain of references. */
3823
3824 tree
3825 substitute_placeholder_in_expr (tree exp, tree obj)
3826 {
3827 enum tree_code code = TREE_CODE (exp);
3828 tree op0, op1, op2, op3;
3829 tree new_tree;
3830
3831 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3832 in the chain of OBJ. */
3833 if (code == PLACEHOLDER_EXPR)
3834 {
3835 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3836 tree elt;
3837
3838 for (elt = obj; elt != 0;
3839 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3840 || TREE_CODE (elt) == COND_EXPR)
3841 ? TREE_OPERAND (elt, 1)
3842 : (REFERENCE_CLASS_P (elt)
3843 || UNARY_CLASS_P (elt)
3844 || BINARY_CLASS_P (elt)
3845 || VL_EXP_CLASS_P (elt)
3846 || EXPRESSION_CLASS_P (elt))
3847 ? TREE_OPERAND (elt, 0) : 0))
3848 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3849 return elt;
3850
3851 for (elt = obj; elt != 0;
3852 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3853 || TREE_CODE (elt) == COND_EXPR)
3854 ? TREE_OPERAND (elt, 1)
3855 : (REFERENCE_CLASS_P (elt)
3856 || UNARY_CLASS_P (elt)
3857 || BINARY_CLASS_P (elt)
3858 || VL_EXP_CLASS_P (elt)
3859 || EXPRESSION_CLASS_P (elt))
3860 ? TREE_OPERAND (elt, 0) : 0))
3861 if (POINTER_TYPE_P (TREE_TYPE (elt))
3862 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3863 == need_type))
3864 return fold_build1 (INDIRECT_REF, need_type, elt);
3865
3866 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3867 survives until RTL generation, there will be an error. */
3868 return exp;
3869 }
3870
3871 /* TREE_LIST is special because we need to look at TREE_VALUE
3872 and TREE_CHAIN, not TREE_OPERANDS. */
3873 else if (code == TREE_LIST)
3874 {
3875 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3876 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3877 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3878 return exp;
3879
3880 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3881 }
3882 else
3883 switch (TREE_CODE_CLASS (code))
3884 {
3885 case tcc_constant:
3886 case tcc_declaration:
3887 return exp;
3888
3889 case tcc_exceptional:
3890 case tcc_unary:
3891 case tcc_binary:
3892 case tcc_comparison:
3893 case tcc_expression:
3894 case tcc_reference:
3895 case tcc_statement:
3896 switch (TREE_CODE_LENGTH (code))
3897 {
3898 case 0:
3899 return exp;
3900
3901 case 1:
3902 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3903 if (op0 == TREE_OPERAND (exp, 0))
3904 return exp;
3905
3906 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3907 break;
3908
3909 case 2:
3910 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3911 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3912
3913 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3914 return exp;
3915
3916 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3917 break;
3918
3919 case 3:
3920 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3921 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3922 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3923
3924 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3925 && op2 == TREE_OPERAND (exp, 2))
3926 return exp;
3927
3928 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3929 break;
3930
3931 case 4:
3932 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3933 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3934 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3935 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3936
3937 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3938 && op2 == TREE_OPERAND (exp, 2)
3939 && op3 == TREE_OPERAND (exp, 3))
3940 return exp;
3941
3942 new_tree
3943 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3944 break;
3945
3946 default:
3947 gcc_unreachable ();
3948 }
3949 break;
3950
3951 case tcc_vl_exp:
3952 {
3953 int i;
3954
3955 new_tree = NULL_TREE;
3956
3957 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3958 {
3959 tree op = TREE_OPERAND (exp, i);
3960 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3961 if (new_op != op)
3962 {
3963 if (!new_tree)
3964 new_tree = copy_node (exp);
3965 TREE_OPERAND (new_tree, i) = new_op;
3966 }
3967 }
3968
3969 if (new_tree)
3970 {
3971 new_tree = fold (new_tree);
3972 if (TREE_CODE (new_tree) == CALL_EXPR)
3973 process_call_operands (new_tree);
3974 }
3975 else
3976 return exp;
3977 }
3978 break;
3979
3980 default:
3981 gcc_unreachable ();
3982 }
3983
3984 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3985
3986 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3987 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3988
3989 return new_tree;
3990 }
3991 \f
3992
3993 /* Subroutine of stabilize_reference; this is called for subtrees of
3994 references. Any expression with side-effects must be put in a SAVE_EXPR
3995 to ensure that it is only evaluated once.
3996
3997 We don't put SAVE_EXPR nodes around everything, because assigning very
3998 simple expressions to temporaries causes us to miss good opportunities
3999 for optimizations. Among other things, the opportunity to fold in the
4000 addition of a constant into an addressing mode often gets lost, e.g.
4001 "y[i+1] += x;". In general, we take the approach that we should not make
4002 an assignment unless we are forced into it - i.e., that any non-side effect
4003 operator should be allowed, and that cse should take care of coalescing
4004 multiple utterances of the same expression should that prove fruitful. */
4005
4006 static tree
4007 stabilize_reference_1 (tree e)
4008 {
4009 tree result;
4010 enum tree_code code = TREE_CODE (e);
4011
4012 /* We cannot ignore const expressions because it might be a reference
4013 to a const array but whose index contains side-effects. But we can
4014 ignore things that are actual constant or that already have been
4015 handled by this function. */
4016
4017 if (tree_invariant_p (e))
4018 return e;
4019
4020 switch (TREE_CODE_CLASS (code))
4021 {
4022 case tcc_exceptional:
4023 case tcc_type:
4024 case tcc_declaration:
4025 case tcc_comparison:
4026 case tcc_statement:
4027 case tcc_expression:
4028 case tcc_reference:
4029 case tcc_vl_exp:
4030 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4031 so that it will only be evaluated once. */
4032 /* The reference (r) and comparison (<) classes could be handled as
4033 below, but it is generally faster to only evaluate them once. */
4034 if (TREE_SIDE_EFFECTS (e))
4035 return save_expr (e);
4036 return e;
4037
4038 case tcc_constant:
4039 /* Constants need no processing. In fact, we should never reach
4040 here. */
4041 return e;
4042
4043 case tcc_binary:
4044 /* Division is slow and tends to be compiled with jumps,
4045 especially the division by powers of 2 that is often
4046 found inside of an array reference. So do it just once. */
4047 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4048 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4049 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4050 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4051 return save_expr (e);
4052 /* Recursively stabilize each operand. */
4053 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4054 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4055 break;
4056
4057 case tcc_unary:
4058 /* Recursively stabilize each operand. */
4059 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4060 break;
4061
4062 default:
4063 gcc_unreachable ();
4064 }
4065
4066 TREE_TYPE (result) = TREE_TYPE (e);
4067 TREE_READONLY (result) = TREE_READONLY (e);
4068 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4069 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4070
4071 return result;
4072 }
4073
4074 /* Stabilize a reference so that we can use it any number of times
4075 without causing its operands to be evaluated more than once.
4076 Returns the stabilized reference. This works by means of save_expr,
4077 so see the caveats in the comments about save_expr.
4078
4079 Also allows conversion expressions whose operands are references.
4080 Any other kind of expression is returned unchanged. */
4081
4082 tree
4083 stabilize_reference (tree ref)
4084 {
4085 tree result;
4086 enum tree_code code = TREE_CODE (ref);
4087
4088 switch (code)
4089 {
4090 case VAR_DECL:
4091 case PARM_DECL:
4092 case RESULT_DECL:
4093 /* No action is needed in this case. */
4094 return ref;
4095
4096 CASE_CONVERT:
4097 case FLOAT_EXPR:
4098 case FIX_TRUNC_EXPR:
4099 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4100 break;
4101
4102 case INDIRECT_REF:
4103 result = build_nt (INDIRECT_REF,
4104 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4105 break;
4106
4107 case COMPONENT_REF:
4108 result = build_nt (COMPONENT_REF,
4109 stabilize_reference (TREE_OPERAND (ref, 0)),
4110 TREE_OPERAND (ref, 1), NULL_TREE);
4111 break;
4112
4113 case BIT_FIELD_REF:
4114 result = build_nt (BIT_FIELD_REF,
4115 stabilize_reference (TREE_OPERAND (ref, 0)),
4116 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4117 break;
4118
4119 case ARRAY_REF:
4120 result = build_nt (ARRAY_REF,
4121 stabilize_reference (TREE_OPERAND (ref, 0)),
4122 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4123 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4124 break;
4125
4126 case ARRAY_RANGE_REF:
4127 result = build_nt (ARRAY_RANGE_REF,
4128 stabilize_reference (TREE_OPERAND (ref, 0)),
4129 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4130 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4131 break;
4132
4133 case COMPOUND_EXPR:
4134 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4135 it wouldn't be ignored. This matters when dealing with
4136 volatiles. */
4137 return stabilize_reference_1 (ref);
4138
4139 /* If arg isn't a kind of lvalue we recognize, make no change.
4140 Caller should recognize the error for an invalid lvalue. */
4141 default:
4142 return ref;
4143
4144 case ERROR_MARK:
4145 return error_mark_node;
4146 }
4147
4148 TREE_TYPE (result) = TREE_TYPE (ref);
4149 TREE_READONLY (result) = TREE_READONLY (ref);
4150 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4151 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4152
4153 return result;
4154 }
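
/* Editorial sketch (not part of the original source): in a front end,
   stabilize_reference is typically used before expanding a compound
   assignment so the lvalue is evaluated only once.  Assuming LHS is an
   existing reference tree such as an ARRAY_REF whose index has side
   effects:

     tree stable_lhs = stabilize_reference (lhs);
     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (stable_lhs), stable_lhs,
                        build_int_cst (TREE_TYPE (stable_lhs), 1));
     tree assign = build2 (MODIFY_EXPR, TREE_TYPE (stable_lhs),
                           stable_lhs, rhs);

   Side-effecting subexpressions inside LHS end up wrapped in SAVE_EXPRs
   by stabilize_reference_1, so both uses of STABLE_LHS evaluate them
   exactly once.  */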
4155 \f
4156 /* Low-level constructors for expressions. */
4157
4158 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4159 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4160
4161 void
4162 recompute_tree_invariant_for_addr_expr (tree t)
4163 {
4164 tree node;
4165 bool tc = true, se = false;
4166
4167 /* We started out assuming this address is both invariant and constant and
4168 has no side effects. Now go down any handled components and see if
4169 any of them involve offsets that are either non-constant or non-invariant.
4170 Also check for side-effects.
4171
4172 ??? Note that this code makes no attempt to deal with the case where
4173 taking the address of something causes a copy due to misalignment. */
4174
4175 #define UPDATE_FLAGS(NODE) \
4176 do { tree _node = (NODE); \
4177 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4178 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4179
4180 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4181 node = TREE_OPERAND (node, 0))
4182 {
4183 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4184 array reference (probably made temporarily by the G++ front end),
4185 so ignore all the operands. */
4186 if ((TREE_CODE (node) == ARRAY_REF
4187 || TREE_CODE (node) == ARRAY_RANGE_REF)
4188 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4189 {
4190 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4191 if (TREE_OPERAND (node, 2))
4192 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4193 if (TREE_OPERAND (node, 3))
4194 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4195 }
4196 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4197 FIELD_DECL, apparently. The G++ front end can put something else
4198 there, at least temporarily. */
4199 else if (TREE_CODE (node) == COMPONENT_REF
4200 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4201 {
4202 if (TREE_OPERAND (node, 2))
4203 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4204 }
4205 }
4206
4207 node = lang_hooks.expr_to_decl (node, &tc, &se);
4208
4209 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4210 the address, since &(*a)->b is a form of addition. If it's a constant, the
4211 address is constant too. If it's a decl, its address is constant if the
4212 decl is static. Everything else is not constant and, furthermore,
4213 taking the address of a volatile variable is not volatile. */
4214 if (TREE_CODE (node) == INDIRECT_REF
4215 || TREE_CODE (node) == MEM_REF)
4216 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4217 else if (CONSTANT_CLASS_P (node))
4218 ;
4219 else if (DECL_P (node))
4220 tc &= (staticp (node) != NULL_TREE);
4221 else
4222 {
4223 tc = false;
4224 se |= TREE_SIDE_EFFECTS (node);
4225 }
4226
4227
4228 TREE_CONSTANT (t) = tc;
4229 TREE_SIDE_EFFECTS (t) = se;
4230 #undef UPDATE_FLAGS
4231 }
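
/* Editorial sketch (not part of the original source): build1 already
   calls this function for a fresh ADDR_EXPR, but when an existing
   ADDR_EXPR has its operand rewritten in place the flags must be
   refreshed by hand.  Assuming ADDR is such a node and NEW_BASE a
   replacement decl (both hypothetical):

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   After the call, TREE_CONSTANT (addr) is set only if NEW_BASE is
   static (see the staticp check above), and TREE_SIDE_EFFECTS follows
   the new operand.  */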
4232
4233 /* Build an expression of code CODE, data type TYPE, and operands as
4234 specified. Expressions and reference nodes can be created this way.
4235 Constants, decls, types and misc nodes cannot be.
4236
4237 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4238 enough for all extant tree codes. */
4239
4240 tree
4241 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4242 {
4243 tree t;
4244
4245 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4246
4247 t = make_node_stat (code PASS_MEM_STAT);
4248 TREE_TYPE (t) = tt;
4249
4250 return t;
4251 }
4252
4253 tree
4254 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4255 {
4256 int length = sizeof (struct tree_exp);
4257 tree t;
4258
4259 record_node_allocation_statistics (code, length);
4260
4261 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4262
4263 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4264
4265 memset (t, 0, sizeof (struct tree_common));
4266
4267 TREE_SET_CODE (t, code);
4268
4269 TREE_TYPE (t) = type;
4270 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4271 TREE_OPERAND (t, 0) = node;
4272 if (node && !TYPE_P (node))
4273 {
4274 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4275 TREE_READONLY (t) = TREE_READONLY (node);
4276 }
4277
4278 if (TREE_CODE_CLASS (code) == tcc_statement)
4279 TREE_SIDE_EFFECTS (t) = 1;
4280 else switch (code)
4281 {
4282 case VA_ARG_EXPR:
4283 /* All of these have side-effects, no matter what their
4284 operands are. */
4285 TREE_SIDE_EFFECTS (t) = 1;
4286 TREE_READONLY (t) = 0;
4287 break;
4288
4289 case INDIRECT_REF:
4290 /* Whether a dereference is readonly has nothing to do with whether
4291 its operand is readonly. */
4292 TREE_READONLY (t) = 0;
4293 break;
4294
4295 case ADDR_EXPR:
4296 if (node)
4297 recompute_tree_invariant_for_addr_expr (t);
4298 break;
4299
4300 default:
4301 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4302 && node && !TYPE_P (node)
4303 && TREE_CONSTANT (node))
4304 TREE_CONSTANT (t) = 1;
4305 if (TREE_CODE_CLASS (code) == tcc_reference
4306 && node && TREE_THIS_VOLATILE (node))
4307 TREE_THIS_VOLATILE (t) = 1;
4308 break;
4309 }
4310
4311 return t;
4312 }
4313
4314 #define PROCESS_ARG(N) \
4315 do { \
4316 TREE_OPERAND (t, N) = arg##N; \
4317 if (arg##N && !TYPE_P (arg##N)) \
4318 { \
4319 if (TREE_SIDE_EFFECTS (arg##N)) \
4320 side_effects = 1; \
4321 if (!TREE_READONLY (arg##N) \
4322 && !CONSTANT_CLASS_P (arg##N)) \
4323 (void) (read_only = 0); \
4324 if (!TREE_CONSTANT (arg##N)) \
4325 (void) (constant = 0); \
4326 } \
4327 } while (0)
4328
4329 tree
4330 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4331 {
4332 bool constant, read_only, side_effects;
4333 tree t;
4334
4335 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4336
4337 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4338 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4339 /* When sizetype precision doesn't match that of pointers
4340 we need to be able to build explicit extensions or truncations
4341 of the offset argument. */
4342 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4343 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4344 && TREE_CODE (arg1) == INTEGER_CST);
4345
4346 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4347 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4348 && ptrofftype_p (TREE_TYPE (arg1)));
4349
4350 t = make_node_stat (code PASS_MEM_STAT);
4351 TREE_TYPE (t) = tt;
4352
4353 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4354 result based on those same flags for the arguments. But if the
4355 arguments aren't really even `tree' expressions, we shouldn't be trying
4356 to do this. */
4357
4358 /* Expressions without side effects may be constant if their
4359 arguments are as well. */
4360 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4361 || TREE_CODE_CLASS (code) == tcc_binary);
4362 read_only = 1;
4363 side_effects = TREE_SIDE_EFFECTS (t);
4364
4365 PROCESS_ARG (0);
4366 PROCESS_ARG (1);
4367
4368 TREE_SIDE_EFFECTS (t) = side_effects;
4369 if (code == MEM_REF)
4370 {
4371 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4372 {
4373 tree o = TREE_OPERAND (arg0, 0);
4374 TREE_READONLY (t) = TREE_READONLY (o);
4375 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4376 }
4377 }
4378 else
4379 {
4380 TREE_READONLY (t) = read_only;
4381 TREE_CONSTANT (t) = constant;
4382 TREE_THIS_VOLATILE (t)
4383 = (TREE_CODE_CLASS (code) == tcc_reference
4384 && arg0 && TREE_THIS_VOLATILE (arg0));
4385 }
4386
4387 return t;
4388 }
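
/* Editorial sketch (not part of the original source): a typical use of
   the buildN helpers, constructing "a + 4" as GENERIC with build2,
   assuming A is an existing integer-typed tree:

     tree four = build_int_cst (integer_type_node, 4);
     tree sum  = build2 (PLUS_EXPR, integer_type_node, a, four);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of SUM are derived
   from the operands by PROCESS_ARG above; folding is the caller's job
   (see the fold_buildN helpers in fold-const.c).  */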
4389
4390
4391 tree
4392 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4393 tree arg2 MEM_STAT_DECL)
4394 {
4395 bool constant, read_only, side_effects;
4396 tree t;
4397
4398 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4399 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4400
4401 t = make_node_stat (code PASS_MEM_STAT);
4402 TREE_TYPE (t) = tt;
4403
4404 read_only = 1;
4405
4406 /* As a special exception, if COND_EXPR has NULL branches, we
4407 assume that it is a gimple statement and always consider
4408 it to have side effects. */
4409 if (code == COND_EXPR
4410 && tt == void_type_node
4411 && arg1 == NULL_TREE
4412 && arg2 == NULL_TREE)
4413 side_effects = true;
4414 else
4415 side_effects = TREE_SIDE_EFFECTS (t);
4416
4417 PROCESS_ARG (0);
4418 PROCESS_ARG (1);
4419 PROCESS_ARG (2);
4420
4421 if (code == COND_EXPR)
4422 TREE_READONLY (t) = read_only;
4423
4424 TREE_SIDE_EFFECTS (t) = side_effects;
4425 TREE_THIS_VOLATILE (t)
4426 = (TREE_CODE_CLASS (code) == tcc_reference
4427 && arg0 && TREE_THIS_VOLATILE (arg0));
4428
4429 return t;
4430 }
4431
4432 tree
4433 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4434 tree arg2, tree arg3 MEM_STAT_DECL)
4435 {
4436 bool constant, read_only, side_effects;
4437 tree t;
4438
4439 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4440
4441 t = make_node_stat (code PASS_MEM_STAT);
4442 TREE_TYPE (t) = tt;
4443
4444 side_effects = TREE_SIDE_EFFECTS (t);
4445
4446 PROCESS_ARG (0);
4447 PROCESS_ARG (1);
4448 PROCESS_ARG (2);
4449 PROCESS_ARG (3);
4450
4451 TREE_SIDE_EFFECTS (t) = side_effects;
4452 TREE_THIS_VOLATILE (t)
4453 = (TREE_CODE_CLASS (code) == tcc_reference
4454 && arg0 && TREE_THIS_VOLATILE (arg0));
4455
4456 return t;
4457 }
4458
4459 tree
4460 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4461 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4462 {
4463 bool constant, read_only, side_effects;
4464 tree t;
4465
4466 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4467
4468 t = make_node_stat (code PASS_MEM_STAT);
4469 TREE_TYPE (t) = tt;
4470
4471 side_effects = TREE_SIDE_EFFECTS (t);
4472
4473 PROCESS_ARG (0);
4474 PROCESS_ARG (1);
4475 PROCESS_ARG (2);
4476 PROCESS_ARG (3);
4477 PROCESS_ARG (4);
4478
4479 TREE_SIDE_EFFECTS (t) = side_effects;
4480 if (code == TARGET_MEM_REF)
4481 {
4482 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4483 {
4484 tree o = TREE_OPERAND (arg0, 0);
4485 TREE_READONLY (t) = TREE_READONLY (o);
4486 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4487 }
4488 }
4489 else
4490 TREE_THIS_VOLATILE (t)
4491 = (TREE_CODE_CLASS (code) == tcc_reference
4492 && arg0 && TREE_THIS_VOLATILE (arg0));
4493
4494 return t;
4495 }
4496
4497 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4498 on the pointer PTR. */
4499
4500 tree
4501 build_simple_mem_ref_loc (location_t loc, tree ptr)
4502 {
4503 HOST_WIDE_INT offset = 0;
4504 tree ptype = TREE_TYPE (ptr);
4505 tree tem;
4506 /* For convenience allow addresses that collapse to a simple base
4507 and offset. */
4508 if (TREE_CODE (ptr) == ADDR_EXPR
4509 && (handled_component_p (TREE_OPERAND (ptr, 0))
4510 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4511 {
4512 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4513 gcc_assert (ptr);
4514 ptr = build_fold_addr_expr (ptr);
4515 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4516 }
4517 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4518 ptr, build_int_cst (ptype, offset));
4519 SET_EXPR_LOCATION (tem, loc);
4520 return tem;
4521 }
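
/* Editorial sketch (not from the original source): dereferencing a
   pointer decl or SSA name P as "*p" in a GIMPLE-friendly form:

     tree deref = build_simple_mem_ref_loc (input_location, p);

   When P is not an ADDR_EXPR of a handled component, this is equivalent
   to build2 (MEM_REF, TREE_TYPE (TREE_TYPE (p)), p,
   build_int_cst (TREE_TYPE (p), 0)).  */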
4522
4523 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4524
4525 offset_int
4526 mem_ref_offset (const_tree t)
4527 {
4528 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4529 }
4530
4531 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4532 offsetted by OFFSET units. */
4533
4534 tree
4535 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4536 {
4537 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4538 build_fold_addr_expr (base),
4539 build_int_cst (ptr_type_node, offset));
4540 tree addr = build1 (ADDR_EXPR, type, ref);
4541 recompute_tree_invariant_for_addr_expr (addr);
4542 return addr;
4543 }
4544
4545 /* Similar to the functions above, except don't specify the TREE_TYPE
4546 and leave TREE_SIDE_EFFECTS as 0.
4547 It is permissible for arguments to be null,
4548 or even garbage if their values do not matter. */
4549
4550 tree
4551 build_nt (enum tree_code code, ...)
4552 {
4553 tree t;
4554 int length;
4555 int i;
4556 va_list p;
4557
4558 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4559
4560 va_start (p, code);
4561
4562 t = make_node (code);
4563 length = TREE_CODE_LENGTH (code);
4564
4565 for (i = 0; i < length; i++)
4566 TREE_OPERAND (t, i) = va_arg (p, tree);
4567
4568 va_end (p);
4569 return t;
4570 }
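
/* Editorial example (a sketch, not in the original source): build_nt is
   used when the type and flags will be filled in by the caller, as
   stabilize_reference does above.  For instance, a raw COMPONENT_REF
   whose type is copied afterwards:

     tree t = build_nt (COMPONENT_REF, object, field, NULL_TREE);
     TREE_TYPE (t) = TREE_TYPE (field);

   OBJECT and FIELD here are hypothetical existing trees.  */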
4571
4572 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4573 tree vec. */
4574
4575 tree
4576 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4577 {
4578 tree ret, t;
4579 unsigned int ix;
4580
4581 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4582 CALL_EXPR_FN (ret) = fn;
4583 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4584 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4585 CALL_EXPR_ARG (ret, ix) = t;
4586 return ret;
4587 }
4588 \f
4589 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4590 We do NOT enter this node in any sort of symbol table.
4591
4592 LOC is the location of the decl.
4593
4594 layout_decl is used to set up the decl's storage layout.
4595 Other slots are initialized to 0 or null pointers. */
4596
4597 tree
4598 build_decl_stat (location_t loc, enum tree_code code, tree name,
4599 tree type MEM_STAT_DECL)
4600 {
4601 tree t;
4602
4603 t = make_node_stat (code PASS_MEM_STAT);
4604 DECL_SOURCE_LOCATION (t) = loc;
4605
4606 /* if (type == error_mark_node)
4607 type = integer_type_node; */
4608 /* That is not done, deliberately, so that having error_mark_node
4609 as the type can suppress useless errors in the use of this variable. */
4610
4611 DECL_NAME (t) = name;
4612 TREE_TYPE (t) = type;
4613
4614 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4615 layout_decl (t, 0);
4616
4617 return t;
4618 }
4619
4620 /* Builds and returns a function declaration with NAME and TYPE. */
4621
4622 tree
4623 build_fn_decl (const char *name, tree type)
4624 {
4625 tree id = get_identifier (name);
4626 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4627
4628 DECL_EXTERNAL (decl) = 1;
4629 TREE_PUBLIC (decl) = 1;
4630 DECL_ARTIFICIAL (decl) = 1;
4631 TREE_NOTHROW (decl) = 1;
4632
4633 return decl;
4634 }
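
/* Editorial sketch (not part of the original source): declaring an
   external helper function "void foo (void)"; the name is hypothetical:

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("foo", fntype);

   The result is public, external, artificial and nothrow, as set above;
   callers that need different semantics adjust the flags afterwards.  */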
4635
4636 vec<tree, va_gc> *all_translation_units;
4637
4638 /* Builds a new translation-unit decl with name NAME, queues it in the
4639 global list of translation-unit decls and returns it. */
4640
4641 tree
4642 build_translation_unit_decl (tree name)
4643 {
4644 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4645 name, NULL_TREE);
4646 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4647 vec_safe_push (all_translation_units, tu);
4648 return tu;
4649 }
4650
4651 \f
4652 /* BLOCK nodes are used to represent the structure of binding contours
4653 and declarations, once those contours have been exited and their contents
4654 compiled. This information is used for outputting debugging info. */
4655
4656 tree
4657 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4658 {
4659 tree block = make_node (BLOCK);
4660
4661 BLOCK_VARS (block) = vars;
4662 BLOCK_SUBBLOCKS (block) = subblocks;
4663 BLOCK_SUPERCONTEXT (block) = supercontext;
4664 BLOCK_CHAIN (block) = chain;
4665 return block;
4666 }
4667
4668 \f
4669 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4670
4671 LOC is the location to use in tree T. */
4672
4673 void
4674 protected_set_expr_location (tree t, location_t loc)
4675 {
4676 if (CAN_HAVE_LOCATION_P (t))
4677 SET_EXPR_LOCATION (t, loc);
4678 }
4679 \f
4680 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4681 is ATTRIBUTE. */
4682
4683 tree
4684 build_decl_attribute_variant (tree ddecl, tree attribute)
4685 {
4686 DECL_ATTRIBUTES (ddecl) = attribute;
4687 return ddecl;
4688 }
4689
4690 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4691 is ATTRIBUTE and its qualifiers are QUALS.
4692
4693 Record such modified types already made so we don't make duplicates. */
4694
4695 tree
4696 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4697 {
4698 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4699 {
4700 inchash::hash hstate;
4701 tree ntype;
4702 int i;
4703 tree t;
4704 enum tree_code code = TREE_CODE (ttype);
4705
4706 /* Building a distinct copy of a tagged type is inappropriate; it
4707 causes breakage in code that expects there to be a one-to-one
4708 relationship between a struct and its fields.
4709 build_duplicate_type is another solution (as used in
4710 handle_transparent_union_attribute), but that doesn't play well
4711 with the stronger C++ type identity model. */
4712 if (TREE_CODE (ttype) == RECORD_TYPE
4713 || TREE_CODE (ttype) == UNION_TYPE
4714 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4715 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4716 {
4717 warning (OPT_Wattributes,
4718 "ignoring attributes applied to %qT after definition",
4719 TYPE_MAIN_VARIANT (ttype));
4720 return build_qualified_type (ttype, quals);
4721 }
4722
4723 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4724 ntype = build_distinct_type_copy (ttype);
4725
4726 TYPE_ATTRIBUTES (ntype) = attribute;
4727
4728 hstate.add_int (code);
4729 if (TREE_TYPE (ntype))
4730 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4731 attribute_hash_list (attribute, hstate);
4732
4733 switch (TREE_CODE (ntype))
4734 {
4735 case FUNCTION_TYPE:
4736 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4737 break;
4738 case ARRAY_TYPE:
4739 if (TYPE_DOMAIN (ntype))
4740 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4741 break;
4742 case INTEGER_TYPE:
4743 t = TYPE_MAX_VALUE (ntype);
4744 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4745 hstate.add_object (TREE_INT_CST_ELT (t, i));
4746 break;
4747 case REAL_TYPE:
4748 case FIXED_POINT_TYPE:
4749 {
4750 unsigned int precision = TYPE_PRECISION (ntype);
4751 hstate.add_object (precision);
4752 }
4753 break;
4754 default:
4755 break;
4756 }
4757
4758 ntype = type_hash_canon (hstate.end(), ntype);
4759
4760 /* If the target-dependent attributes make NTYPE different from
4761 its canonical type, we will need to use structural equality
4762 checks for this type. */
4763 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4764 || !comp_type_attributes (ntype, ttype))
4765 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4766 else if (TYPE_CANONICAL (ntype) == ntype)
4767 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4768
4769 ttype = build_qualified_type (ntype, quals);
4770 }
4771 else if (TYPE_QUALS (ttype) != quals)
4772 ttype = build_qualified_type (ttype, quals);
4773
4774 return ttype;
4775 }
4776
4777 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4778 the same. */
4779
4780 static bool
4781 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4782 {
4783 tree cl1, cl2;
4784 for (cl1 = clauses1, cl2 = clauses2;
4785 cl1 && cl2;
4786 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4787 {
4788 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4789 return false;
4790 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4791 {
4792 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4793 OMP_CLAUSE_DECL (cl2)) != 1)
4794 return false;
4795 }
4796 switch (OMP_CLAUSE_CODE (cl1))
4797 {
4798 case OMP_CLAUSE_ALIGNED:
4799 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4800 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4801 return false;
4802 break;
4803 case OMP_CLAUSE_LINEAR:
4804 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4805 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4806 return false;
4807 break;
4808 case OMP_CLAUSE_SIMDLEN:
4809 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4810 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4811 return false;
4812 default:
4813 break;
4814 }
4815 }
4816 return true;
4817 }
4818
4819 /* Compare two constructor-element-type constants. Return true if the lists
4820 are known to be equal; otherwise return false. */
4821
4822 static bool
4823 simple_cst_list_equal (const_tree l1, const_tree l2)
4824 {
4825 while (l1 != NULL_TREE && l2 != NULL_TREE)
4826 {
4827 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4828 return false;
4829
4830 l1 = TREE_CHAIN (l1);
4831 l2 = TREE_CHAIN (l2);
4832 }
4833
4834 return l1 == l2;
4835 }
4836
4837 /* Compare two attributes for their value identity. Return true if the
4838 attribute values are known to be equal; otherwise return false.
4839 */
4840
4841 static bool
4842 attribute_value_equal (const_tree attr1, const_tree attr2)
4843 {
4844 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4845 return true;
4846
4847 if (TREE_VALUE (attr1) != NULL_TREE
4848 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4849 && TREE_VALUE (attr2) != NULL
4850 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4851 return (simple_cst_list_equal (TREE_VALUE (attr1),
4852 TREE_VALUE (attr2)) == 1);
4853
4854 if ((flag_openmp || flag_openmp_simd)
4855 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4856 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4857 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4858 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4859 TREE_VALUE (attr2));
4860
4861 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4862 }
4863
4864 /* Return 0 if the attributes for two types are incompatible, 1 if they
4865 are compatible, and 2 if they are nearly compatible (which causes a
4866 warning to be generated). */
4867 int
4868 comp_type_attributes (const_tree type1, const_tree type2)
4869 {
4870 const_tree a1 = TYPE_ATTRIBUTES (type1);
4871 const_tree a2 = TYPE_ATTRIBUTES (type2);
4872 const_tree a;
4873
4874 if (a1 == a2)
4875 return 1;
4876 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4877 {
4878 const struct attribute_spec *as;
4879 const_tree attr;
4880
4881 as = lookup_attribute_spec (get_attribute_name (a));
4882 if (!as || as->affects_type_identity == false)
4883 continue;
4884
4885 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4886 if (!attr || !attribute_value_equal (a, attr))
4887 break;
4888 }
4889 if (!a)
4890 {
4891 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4892 {
4893 const struct attribute_spec *as;
4894
4895 as = lookup_attribute_spec (get_attribute_name (a));
4896 if (!as || as->affects_type_identity == false)
4897 continue;
4898
4899 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4900 break;
4901 /* We don't need to compare trees again, as we did this
4902 already in the first loop. */
4903 }
4904 /* All attributes that affect type identity are equal, so
4905 there is no need to call the target hook for comparison. */
4906 if (!a)
4907 return 1;
4908 }
4909 /* As some attribute combinations - like the default calling convention -
4910 might still be compatible, we have to call the target hook to get the final result. */
4911 return targetm.comp_type_attributes (type1, type2);
4912 }
4913
4914 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4915 is ATTRIBUTE.
4916
4917 Record such modified types already made so we don't make duplicates. */
4918
4919 tree
4920 build_type_attribute_variant (tree ttype, tree attribute)
4921 {
4922 return build_type_attribute_qual_variant (ttype, attribute,
4923 TYPE_QUALS (ttype));
4924 }
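
/* Editorial sketch (not from the original source): attaching an
   attribute with no arguments, e.g. "unused", to a hypothetical
   existing type TYPE:

     tree attrs = tree_cons (get_identifier ("unused"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     tree variant = build_type_attribute_variant (type, attrs);

   The qualifiers of TYPE are preserved because
   build_type_attribute_qual_variant is called with TYPE_QUALS (type).  */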
4925
4926
4927 /* Reset the expression *EXPR_P, a size or position.
4928
4929 ??? We could reset all non-constant sizes or positions. But it's cheap
4930 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4931
4932 We need to reset self-referential sizes or positions because they cannot
4933 be gimplified and thus can contain a CALL_EXPR after the gimplification
4934 is finished, which will run afoul of LTO streaming. And they need to be
4935 reset to something essentially dummy but not constant, so as to preserve
4936 the properties of the object they are attached to. */
4937
4938 static inline void
4939 free_lang_data_in_one_sizepos (tree *expr_p)
4940 {
4941 tree expr = *expr_p;
4942 if (CONTAINS_PLACEHOLDER_P (expr))
4943 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4944 }
4945
4946
4947 /* Reset all the fields in a binfo node BINFO. We only keep
4948 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4949
4950 static void
4951 free_lang_data_in_binfo (tree binfo)
4952 {
4953 unsigned i;
4954 tree t;
4955
4956 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4957
4958 BINFO_VIRTUALS (binfo) = NULL_TREE;
4959 BINFO_BASE_ACCESSES (binfo) = NULL;
4960 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4961 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4962
4963 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4964 free_lang_data_in_binfo (t);
4965 }
4966
4967
4968 /* Reset all language specific information still present in TYPE. */
4969
4970 static void
4971 free_lang_data_in_type (tree type)
4972 {
4973 gcc_assert (TYPE_P (type));
4974
4975 /* Give the FE a chance to remove its own data first. */
4976 lang_hooks.free_lang_data (type);
4977
4978 TREE_LANG_FLAG_0 (type) = 0;
4979 TREE_LANG_FLAG_1 (type) = 0;
4980 TREE_LANG_FLAG_2 (type) = 0;
4981 TREE_LANG_FLAG_3 (type) = 0;
4982 TREE_LANG_FLAG_4 (type) = 0;
4983 TREE_LANG_FLAG_5 (type) = 0;
4984 TREE_LANG_FLAG_6 (type) = 0;
4985
4986 if (TREE_CODE (type) == FUNCTION_TYPE)
4987 {
4988 /* Remove the const and volatile qualifiers from arguments. The
4989 C++ front end removes them, but the C front end does not,
4990 leading to false ODR violation errors when merging two
4991 instances of the same function signature compiled by
4992 different front ends. */
4993 tree p;
4994
4995 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4996 {
4997 tree arg_type = TREE_VALUE (p);
4998
4999 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5000 {
5001 int quals = TYPE_QUALS (arg_type)
5002 & ~TYPE_QUAL_CONST
5003 & ~TYPE_QUAL_VOLATILE;
5004 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5005 free_lang_data_in_type (TREE_VALUE (p));
5006 }
5007 }
5008 }
5009
5010 /* Remove members that are not actually FIELD_DECLs from the field
5011 list of an aggregate. These occur in C++. */
5012 if (RECORD_OR_UNION_TYPE_P (type))
5013 {
5014 tree prev, member;
5015
5016 /* Note that TYPE_FIELDS can be shared across distinct
5017 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5018 to be removed, we cannot set its TREE_CHAIN to NULL.
5019 Otherwise, we would not be able to find all the other fields
5020 in the other instances of this TREE_TYPE.
5021
5022 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5023 prev = NULL_TREE;
5024 member = TYPE_FIELDS (type);
5025 while (member)
5026 {
5027 if (TREE_CODE (member) == FIELD_DECL
5028 || TREE_CODE (member) == TYPE_DECL)
5029 {
5030 if (prev)
5031 TREE_CHAIN (prev) = member;
5032 else
5033 TYPE_FIELDS (type) = member;
5034 prev = member;
5035 }
5036
5037 member = TREE_CHAIN (member);
5038 }
5039
5040 if (prev)
5041 TREE_CHAIN (prev) = NULL_TREE;
5042 else
5043 TYPE_FIELDS (type) = NULL_TREE;
5044
5045 TYPE_METHODS (type) = NULL_TREE;
5046 if (TYPE_BINFO (type))
5047 {
5048 free_lang_data_in_binfo (TYPE_BINFO (type));
5049 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5050 || !flag_devirtualize)
5051 && (!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5052 || debug_info_level != DINFO_LEVEL_NONE))
5053 TYPE_BINFO (type) = NULL;
5054 }
5055 }
5056 else
5057 {
5058 /* For non-aggregate types, clear out the language slot (which
5059 overloads TYPE_BINFO). */
5060 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5061
5062 if (INTEGRAL_TYPE_P (type)
5063 || SCALAR_FLOAT_TYPE_P (type)
5064 || FIXED_POINT_TYPE_P (type))
5065 {
5066 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5067 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5068 }
5069 }
5070
5071 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5072 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5073
5074 if (TYPE_CONTEXT (type)
5075 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5076 {
5077 tree ctx = TYPE_CONTEXT (type);
5078 do
5079 {
5080 ctx = BLOCK_SUPERCONTEXT (ctx);
5081 }
5082 while (ctx && TREE_CODE (ctx) == BLOCK);
5083 TYPE_CONTEXT (type) = ctx;
5084 }
5085 }
5086
5087
5088 /* Return true if DECL may need an assembler name to be set. */
5089
5090 static inline bool
5091 need_assembler_name_p (tree decl)
5092 {
5093 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5094 merging. */
5095 if (flag_lto_odr_type_mering
5096 && TREE_CODE (decl) == TYPE_DECL
5097 && DECL_NAME (decl)
5098 && decl == TYPE_NAME (TREE_TYPE (decl))
5099 && !is_lang_specific (TREE_TYPE (decl))
5100 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5101 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5102 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5103 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5104 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5105 if (TREE_CODE (decl) != FUNCTION_DECL
5106 && TREE_CODE (decl) != VAR_DECL)
5107 return false;
5108
5109 /* If DECL already has its assembler name set, it does not need a
5110 new one. */
5111 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5112 || DECL_ASSEMBLER_NAME_SET_P (decl))
5113 return false;
5114
5115 /* Abstract decls do not need an assembler name. */
5116 if (DECL_ABSTRACT_P (decl))
5117 return false;
5118
5119 /* For VAR_DECLs, only static, public and external symbols need an
5120 assembler name. */
5121 if (TREE_CODE (decl) == VAR_DECL
5122 && !TREE_STATIC (decl)
5123 && !TREE_PUBLIC (decl)
5124 && !DECL_EXTERNAL (decl))
5125 return false;
5126
5127 if (TREE_CODE (decl) == FUNCTION_DECL)
5128 {
5129 /* Do not set assembler name on builtins. Allow RTL expansion to
5130 decide whether to expand inline or via a regular call. */
5131 if (DECL_BUILT_IN (decl)
5132 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5133 return false;
5134
5135 /* Functions represented in the callgraph need an assembler name. */
5136 if (cgraph_node::get (decl) != NULL)
5137 return true;
5138
5139 /* Unused and not public functions don't need an assembler name. */
5140 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5141 return false;
5142 }
5143
5144 return true;
5145 }
5146
5147
5148 /* Reset all language specific information still present in symbol
5149 DECL. */
5150
5151 static void
5152 free_lang_data_in_decl (tree decl)
5153 {
5154 gcc_assert (DECL_P (decl));
5155
5156 /* Give the FE a chance to remove its own data first. */
5157 lang_hooks.free_lang_data (decl);
5158
5159 TREE_LANG_FLAG_0 (decl) = 0;
5160 TREE_LANG_FLAG_1 (decl) = 0;
5161 TREE_LANG_FLAG_2 (decl) = 0;
5162 TREE_LANG_FLAG_3 (decl) = 0;
5163 TREE_LANG_FLAG_4 (decl) = 0;
5164 TREE_LANG_FLAG_5 (decl) = 0;
5165 TREE_LANG_FLAG_6 (decl) = 0;
5166
5167 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5168 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5169 if (TREE_CODE (decl) == FIELD_DECL)
5170 {
5171 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5172 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5173 DECL_QUALIFIER (decl) = NULL_TREE;
5174 }
5175
5176 if (TREE_CODE (decl) == FUNCTION_DECL)
5177 {
5178 struct cgraph_node *node;
5179 if (!(node = cgraph_node::get (decl))
5180 || (!node->definition && !node->clones))
5181 {
5182 if (node)
5183 node->release_body ();
5184 else
5185 {
5186 release_function_body (decl);
5187 DECL_ARGUMENTS (decl) = NULL;
5188 DECL_RESULT (decl) = NULL;
5189 DECL_INITIAL (decl) = error_mark_node;
5190 }
5191 }
5192 if (gimple_has_body_p (decl))
5193 {
5194 tree t;
5195
5196 /* If DECL has a gimple body, then the context for its
5197 arguments must be DECL. Otherwise, it doesn't really
5198 matter, as we will not be emitting any code for DECL. In
5199 general, there may be other instances of DECL created by
5200 the front end and since PARM_DECLs are generally shared,
5201 their DECL_CONTEXT changes as the replicas of DECL are
5202 created. The only time where DECL_CONTEXT is important
5203 is for the FUNCTION_DECLs that have a gimple body (since
5204 the PARM_DECL will be used in the function's body). */
5205 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5206 DECL_CONTEXT (t) = decl;
5207 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5208 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5209 = target_option_default_node;
5210 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5211 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5212 = optimization_default_node;
5213 }
5214
5215 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5216 At this point, it is not needed anymore. */
5217 DECL_SAVED_TREE (decl) = NULL_TREE;
5218
5219 /* Clear the abstract origin if it refers to a method. Otherwise
5220 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5221 origin will not be output correctly. */
5222 if (DECL_ABSTRACT_ORIGIN (decl)
5223 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5224 && RECORD_OR_UNION_TYPE_P
5225 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5226 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5227
5228 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5229 DECL_VINDEX referring to itself into a vtable slot number as it
5230 should. This happens with functions that are copied and then forgotten
5231 about. Just clear it; it won't matter anymore. */
5232 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5233 DECL_VINDEX (decl) = NULL_TREE;
5234 }
5235 else if (TREE_CODE (decl) == VAR_DECL)
5236 {
5237 if ((DECL_EXTERNAL (decl)
5238 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5239 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5240 DECL_INITIAL (decl) = NULL_TREE;
5241 }
5242 else if (TREE_CODE (decl) == TYPE_DECL
5243 || TREE_CODE (decl) == FIELD_DECL)
5244 DECL_INITIAL (decl) = NULL_TREE;
5245 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5246 && DECL_INITIAL (decl)
5247 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5248 {
5249 /* Strip builtins from the translation-unit BLOCK. We still have targets
5250 without builtin_decl_explicit support; also, builtins are shared
5251 nodes, so we can't use TREE_CHAIN in multiple lists. */
5252 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5253 while (*nextp)
5254 {
5255 tree var = *nextp;
5256 if (TREE_CODE (var) == FUNCTION_DECL
5257 && DECL_BUILT_IN (var))
5258 *nextp = TREE_CHAIN (var);
5259 else
5260 nextp = &TREE_CHAIN (var);
5261 }
5262 }
5263 }
5264
5265
5266 /* Data used when collecting DECLs and TYPEs for language data removal. */
5267
5268 struct free_lang_data_d
5269 {
5270 /* Worklist to avoid excessive recursion. */
5271 vec<tree> worklist;
5272
5273 /* Set of traversed objects. Used to avoid duplicate visits. */
5274 hash_set<tree> *pset;
5275
5276 /* Array of symbols to process with free_lang_data_in_decl. */
5277 vec<tree> decls;
5278
5279 /* Array of types to process with free_lang_data_in_type. */
5280 vec<tree> types;
5281 };
5282
5283
5284 /* Save all language fields needed to generate proper debug information
5285 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5286
5287 static void
5288 save_debug_info_for_decl (tree t)
5289 {
5290 /*struct saved_debug_info_d *sdi;*/
5291
5292 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5293
5294 /* FIXME. Partial implementation for saving debug info removed. */
5295 }
5296
5297
5298 /* Save all language fields needed to generate proper debug information
5299 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5300
5301 static void
5302 save_debug_info_for_type (tree t)
5303 {
5304 /*struct saved_debug_info_d *sdi;*/
5305
5306 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5307
5308 /* FIXME. Partial implementation for saving debug info removed. */
5309 }
5310
5311
5312 /* Add type or decl T to one of the list of tree nodes that need their
5313 language data removed. The lists are held inside FLD. */
5314
5315 static void
5316 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5317 {
5318 if (DECL_P (t))
5319 {
5320 fld->decls.safe_push (t);
5321 if (debug_info_level > DINFO_LEVEL_TERSE)
5322 save_debug_info_for_decl (t);
5323 }
5324 else if (TYPE_P (t))
5325 {
5326 fld->types.safe_push (t);
5327 if (debug_info_level > DINFO_LEVEL_TERSE)
5328 save_debug_info_for_type (t);
5329 }
5330 else
5331 gcc_unreachable ();
5332 }
5333
5334 /* Push tree node T into FLD->WORKLIST. */
5335
5336 static inline void
5337 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5338 {
5339 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5340 fld->worklist.safe_push ((t));
5341 }
5342
5343
5344 /* Operand callback helper for free_lang_data_in_node. *TP is the
5345 subtree operand being considered. */
5346
5347 static tree
5348 find_decls_types_r (tree *tp, int *ws, void *data)
5349 {
5350 tree t = *tp;
5351 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5352
5353 if (TREE_CODE (t) == TREE_LIST)
5354 return NULL_TREE;
5355
5356 /* Language specific nodes will be removed, so there is no need
5357 to gather anything under them. */
5358 if (is_lang_specific (t))
5359 {
5360 *ws = 0;
5361 return NULL_TREE;
5362 }
5363
5364 if (DECL_P (t))
5365 {
5366 /* Note that walk_tree does not traverse every possible field in
5367 decls, so we have to do our own traversals here. */
5368 add_tree_to_fld_list (t, fld);
5369
5370 fld_worklist_push (DECL_NAME (t), fld);
5371 fld_worklist_push (DECL_CONTEXT (t), fld);
5372 fld_worklist_push (DECL_SIZE (t), fld);
5373 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5374
5375 /* We are going to remove everything under DECL_INITIAL for
5376 TYPE_DECLs. No point walking them. */
5377 if (TREE_CODE (t) != TYPE_DECL)
5378 fld_worklist_push (DECL_INITIAL (t), fld);
5379
5380 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5381 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5382
5383 if (TREE_CODE (t) == FUNCTION_DECL)
5384 {
5385 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5386 fld_worklist_push (DECL_RESULT (t), fld);
5387 }
5388 else if (TREE_CODE (t) == TYPE_DECL)
5389 {
5390 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5391 }
5392 else if (TREE_CODE (t) == FIELD_DECL)
5393 {
5394 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5395 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5396 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5397 fld_worklist_push (DECL_FCONTEXT (t), fld);
5398 }
5399
5400 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5401 && DECL_HAS_VALUE_EXPR_P (t))
5402 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5403
5404 if (TREE_CODE (t) != FIELD_DECL
5405 && TREE_CODE (t) != TYPE_DECL)
5406 fld_worklist_push (TREE_CHAIN (t), fld);
5407 *ws = 0;
5408 }
5409 else if (TYPE_P (t))
5410 {
5411 /* Note that walk_tree does not traverse every possible field in
5412 types, so we have to do our own traversals here. */
5413 add_tree_to_fld_list (t, fld);
5414
5415 if (!RECORD_OR_UNION_TYPE_P (t))
5416 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5417 fld_worklist_push (TYPE_SIZE (t), fld);
5418 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5419 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5420 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5421 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5422 fld_worklist_push (TYPE_NAME (t), fld);
5423 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5424 them and thus do not want to reach unused pointer types
5425 this way. */
5426 if (!POINTER_TYPE_P (t))
5427 fld_worklist_push (TYPE_MINVAL (t), fld);
5428 if (!RECORD_OR_UNION_TYPE_P (t))
5429 fld_worklist_push (TYPE_MAXVAL (t), fld);
5430 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5431 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5432 do not want to reach unused variants this way. */
5433 if (TYPE_CONTEXT (t))
5434 {
5435 tree ctx = TYPE_CONTEXT (t);
5436 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5437 So push that instead. */
5438 while (ctx && TREE_CODE (ctx) == BLOCK)
5439 ctx = BLOCK_SUPERCONTEXT (ctx);
5440 fld_worklist_push (ctx, fld);
5441 }
5442 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5443 want to reach unused types this way. */
5444
5445 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5446 {
5447 unsigned i;
5448 tree tem;
5449 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5450 fld_worklist_push (TREE_TYPE (tem), fld);
5451 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5452 if (tem
5453 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5454 && TREE_CODE (tem) == TREE_LIST)
5455 do
5456 {
5457 fld_worklist_push (TREE_VALUE (tem), fld);
5458 tem = TREE_CHAIN (tem);
5459 }
5460 while (tem);
5461 }
5462 if (RECORD_OR_UNION_TYPE_P (t))
5463 {
5464 tree tem;
5465 /* Push all TYPE_FIELDS - there can be interleaving interesting
5466 and non-interesting things. */
5467 tem = TYPE_FIELDS (t);
5468 while (tem)
5469 {
5470 if (TREE_CODE (tem) == FIELD_DECL
5471 || TREE_CODE (tem) == TYPE_DECL)
5472 fld_worklist_push (tem, fld);
5473 tem = TREE_CHAIN (tem);
5474 }
5475 }
5476
5477 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5478 *ws = 0;
5479 }
5480 else if (TREE_CODE (t) == BLOCK)
5481 {
5482 tree tem;
5483 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5484 fld_worklist_push (tem, fld);
5485 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5486 fld_worklist_push (tem, fld);
5487 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5488 }
5489
5490 if (TREE_CODE (t) != IDENTIFIER_NODE
5491 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5492 fld_worklist_push (TREE_TYPE (t), fld);
5493
5494 return NULL_TREE;
5495 }
5496
5497
5498 /* Find decls and types in T. */
5499
5500 static void
5501 find_decls_types (tree t, struct free_lang_data_d *fld)
5502 {
5503 while (1)
5504 {
5505 if (!fld->pset->contains (t))
5506 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5507 if (fld->worklist.is_empty ())
5508 break;
5509 t = fld->worklist.pop ();
5510 }
5511 }
5512
5513 /* Translate all the types in LIST into the corresponding runtime
5514 types. */
5515
5516 static tree
5517 get_eh_types_for_runtime (tree list)
5518 {
5519 tree head, prev;
5520
5521 if (list == NULL_TREE)
5522 return NULL_TREE;
5523
5524 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5525 prev = head;
5526 list = TREE_CHAIN (list);
5527 while (list)
5528 {
5529 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5530 TREE_CHAIN (prev) = n;
5531 prev = TREE_CHAIN (prev);
5532 list = TREE_CHAIN (list);
5533 }
5534
5535 return head;
5536 }
5537
5538
5539 /* Find decls and types referenced in EH region R and store them in
5540 FLD->DECLS and FLD->TYPES. */
5541
5542 static void
5543 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5544 {
5545 switch (r->type)
5546 {
5547 case ERT_CLEANUP:
5548 break;
5549
5550 case ERT_TRY:
5551 {
5552 eh_catch c;
5553
5554 /* The types referenced in each catch must first be changed to the
5555 EH types used at runtime. This removes references to FE types
5556 in the region. */
5557 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5558 {
5559 c->type_list = get_eh_types_for_runtime (c->type_list);
5560 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5561 }
5562 }
5563 break;
5564
5565 case ERT_ALLOWED_EXCEPTIONS:
5566 r->u.allowed.type_list
5567 = get_eh_types_for_runtime (r->u.allowed.type_list);
5568 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5569 break;
5570
5571 case ERT_MUST_NOT_THROW:
5572 walk_tree (&r->u.must_not_throw.failure_decl,
5573 find_decls_types_r, fld, fld->pset);
5574 break;
5575 }
5576 }
5577
5578
5579 /* Find decls and types referenced in cgraph node N and store them in
5580 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5581 look for *every* kind of DECL and TYPE node reachable from N,
5582 including those embedded inside types and decls (i.e., TYPE_DECLs,
5583 NAMESPACE_DECLs, etc.). */
5584
5585 static void
5586 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5587 {
5588 basic_block bb;
5589 struct function *fn;
5590 unsigned ix;
5591 tree t;
5592
5593 find_decls_types (n->decl, fld);
5594
5595 if (!gimple_has_body_p (n->decl))
5596 return;
5597
5598 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5599
5600 fn = DECL_STRUCT_FUNCTION (n->decl);
5601
5602 /* Traverse locals. */
5603 FOR_EACH_LOCAL_DECL (fn, ix, t)
5604 find_decls_types (t, fld);
5605
5606 /* Traverse EH regions in FN. */
5607 {
5608 eh_region r;
5609 FOR_ALL_EH_REGION_FN (r, fn)
5610 find_decls_types_in_eh_region (r, fld);
5611 }
5612
5613 /* Traverse every statement in FN. */
5614 FOR_EACH_BB_FN (bb, fn)
5615 {
5616 gphi_iterator psi;
5617 gimple_stmt_iterator si;
5618 unsigned i;
5619
5620 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5621 {
5622 gphi *phi = psi.phi ();
5623
5624 for (i = 0; i < gimple_phi_num_args (phi); i++)
5625 {
5626 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5627 find_decls_types (*arg_p, fld);
5628 }
5629 }
5630
5631 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5632 {
5633 gimple stmt = gsi_stmt (si);
5634
5635 if (is_gimple_call (stmt))
5636 find_decls_types (gimple_call_fntype (stmt), fld);
5637
5638 for (i = 0; i < gimple_num_ops (stmt); i++)
5639 {
5640 tree arg = gimple_op (stmt, i);
5641 find_decls_types (arg, fld);
5642 }
5643 }
5644 }
5645 }
5646
5647
5648 /* Find decls and types referenced in varpool node N and store them in
5649 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5650 look for *every* kind of DECL and TYPE node reachable from N,
5651 including those embedded inside types and decls (i.e., TYPE_DECLs,
5652 NAMESPACE_DECLs, etc.). */
5653
5654 static void
5655 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5656 {
5657 find_decls_types (v->decl, fld);
5658 }
5659
5660 /* If T needs an assembler name, have one created for it. */
5661
5662 void
5663 assign_assembler_name_if_neeeded (tree t)
5664 {
5665 if (need_assembler_name_p (t))
5666 {
5667 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5668 diagnostics that use input_location to show locus
5669 information. The problem here is that, at this point,
5670 input_location is generally anchored to the end of the file
5671 (since the parser is long gone), so we don't have a good
5672 position to pin it to.
5673
5674 To alleviate this problem, this uses the location of T's
5675 declaration. Examples of this are
5676 testsuite/g++.dg/template/cond2.C and
5677 testsuite/g++.dg/template/pr35240.C. */
5678 location_t saved_location = input_location;
5679 input_location = DECL_SOURCE_LOCATION (t);
5680
5681 decl_assembler_name (t);
5682
5683 input_location = saved_location;
5684 }
5685 }
5686
5687
5688 /* Free language specific information for every operand and expression
5689 in every node of the call graph. This process operates in three stages:
5690
5691 1- Every callgraph node and varpool node is traversed looking for
5692 decls and types embedded in them. This is a more exhaustive
5693 search than that done by find_referenced_vars, because it will
5694 also collect individual fields, decls embedded in types, etc.
5695
5696 2- All the decls found are sent to free_lang_data_in_decl.
5697
5698 3- All the types found are sent to free_lang_data_in_type.
5699
5700 The ordering between decls and types is important because
5701 free_lang_data_in_decl sets assembler names, which includes
5702 mangling. So types cannot be freed up until assembler names have
5703 been set up. */
5704
5705 static void
5706 free_lang_data_in_cgraph (void)
5707 {
5708 struct cgraph_node *n;
5709 varpool_node *v;
5710 struct free_lang_data_d fld;
5711 tree t;
5712 unsigned i;
5713 alias_pair *p;
5714
5715 /* Initialize sets and arrays to store referenced decls and types. */
5716 fld.pset = new hash_set<tree>;
5717 fld.worklist.create (0);
5718 fld.decls.create (100);
5719 fld.types.create (100);
5720
5721 /* Find decls and types in the body of every function in the callgraph. */
5722 FOR_EACH_FUNCTION (n)
5723 find_decls_types_in_node (n, &fld);
5724
5725 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5726 find_decls_types (p->decl, &fld);
5727
5728 /* Find decls and types in every varpool symbol. */
5729 FOR_EACH_VARIABLE (v)
5730 find_decls_types_in_var (v, &fld);
5731
5732 /* Set the assembler name on every decl found. We need to do this
5733 now because free_lang_data_in_decl will invalidate data needed
5734 for mangling. This breaks mangling on interdependent decls. */
5735 FOR_EACH_VEC_ELT (fld.decls, i, t)
5736 assign_assembler_name_if_neeeded (t);
5737
5738 /* Traverse every decl found freeing its language data. */
5739 FOR_EACH_VEC_ELT (fld.decls, i, t)
5740 free_lang_data_in_decl (t);
5741
5742 /* Traverse every type found freeing its language data. */
5743 FOR_EACH_VEC_ELT (fld.types, i, t)
5744 free_lang_data_in_type (t);
5745
5746 delete fld.pset;
5747 fld.worklist.release ();
5748 fld.decls.release ();
5749 fld.types.release ();
5750 }
5751
5752
5753 /* Free resources that are used by FE but are not needed once they are done. */
5754
5755 static unsigned
5756 free_lang_data (void)
5757 {
5758 unsigned i;
5759
5760 /* If we are the LTO frontend, we have freed lang-specific data already. */
5761 if (in_lto_p
5762 || (!flag_generate_lto && !flag_generate_offload))
5763 return 0;
5764
5765 /* Allocate and assign alias sets to the standard integer types
5766 while the slots are still in the way the frontends generated them. */
5767 for (i = 0; i < itk_none; ++i)
5768 if (integer_types[i])
5769 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5770
5771 /* Traverse the IL resetting language specific information for
5772 operands, expressions, etc. */
5773 free_lang_data_in_cgraph ();
5774
5775 /* Create gimple variants for common types. */
5776 ptrdiff_type_node = integer_type_node;
5777 fileptr_type_node = ptr_type_node;
5778
5779 /* Reset some langhooks. Do not reset types_compatible_p; it may
5780 still be used indirectly via the get_alias_set langhook. */
5781 lang_hooks.dwarf_name = lhd_dwarf_name;
5782 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5783 /* We do not want the default decl_assembler_name implementation;
5784 rather, once we have fixed everything, we want a wrapper around it
5785 that asserts all non-local symbols already got their assembler
5786 names and that only produces assembler names for local symbols. Or,
5787 better, make sure we never call decl_assembler_name on local symbols
5788 and devise a separate, middle-end private scheme for it. */
5789
5790 /* Reset diagnostic machinery. */
5791 tree_diagnostics_defaults (global_dc);
5792
5793 return 0;
5794 }
5795
5796
5797 namespace {
5798
5799 const pass_data pass_data_ipa_free_lang_data =
5800 {
5801 SIMPLE_IPA_PASS, /* type */
5802 "*free_lang_data", /* name */
5803 OPTGROUP_NONE, /* optinfo_flags */
5804 TV_IPA_FREE_LANG_DATA, /* tv_id */
5805 0, /* properties_required */
5806 0, /* properties_provided */
5807 0, /* properties_destroyed */
5808 0, /* todo_flags_start */
5809 0, /* todo_flags_finish */
5810 };
5811
5812 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5813 {
5814 public:
5815 pass_ipa_free_lang_data (gcc::context *ctxt)
5816 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5817 {}
5818
5819 /* opt_pass methods: */
5820 virtual unsigned int execute (function *) { return free_lang_data (); }
5821
5822 }; // class pass_ipa_free_lang_data
5823
5824 } // anon namespace
5825
5826 simple_ipa_opt_pass *
5827 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5828 {
5829 return new pass_ipa_free_lang_data (ctxt);
5830 }
5831
5832 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5833 ATTR_NAME. Also used internally by remove_attribute(). */
5834 bool
5835 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5836 {
5837 size_t ident_len = IDENTIFIER_LENGTH (ident);
5838
5839 if (ident_len == attr_len)
5840 {
5841 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5842 return true;
5843 }
5844 else if (ident_len == attr_len + 4)
5845 {
5846 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
5847 '__text__'. */
5848 const char *p = IDENTIFIER_POINTER (ident);
5849 if (p[0] == '_' && p[1] == '_'
5850 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5851 && strncmp (attr_name, p + 2, attr_len) == 0)
5852 return true;
5853 }
5854
5855 return false;
5856 }
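
/* Editorial example (a sketch, not in the original source): the public
   wrapper is_attribute_p in tree.h passes strlen (ATTR_NAME) as
   ATTR_LEN, so both spellings of an attribute identifier match:

     is_attribute_p ("packed", get_identifier ("packed"))      -> true
     is_attribute_p ("packed", get_identifier ("__packed__"))  -> true
     is_attribute_p ("packed", get_identifier ("__packed"))    -> false  */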
5857
5858 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5859 of ATTR_NAME, and LIST is not NULL_TREE. */
5860 tree
5861 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5862 {
5863 while (list)
5864 {
5865 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5866
5867 if (ident_len == attr_len)
5868 {
5869 if (!strcmp (attr_name,
5870 IDENTIFIER_POINTER (get_attribute_name (list))))
5871 break;
5872 }
5873 /* TODO: If we made sure that attributes were stored in the
5874 canonical form without '__...__' (i.e., as in 'text' as opposed
5875 to '__text__') then we could avoid the following case. */
5876 else if (ident_len == attr_len + 4)
5877 {
5878 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5879 if (p[0] == '_' && p[1] == '_'
5880 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5881 && strncmp (attr_name, p + 2, attr_len) == 0)
5882 break;
5883 }
5884 list = TREE_CHAIN (list);
5885 }
5886
5887 return list;
5888 }
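
/* Editorial sketch (not part of the original source): the usual entry
   point is the lookup_attribute wrapper in tree.h, e.g. checking a
   hypothetical FUNCTION_DECL FNDECL for "noinline":

     if (lookup_attribute ("noinline", DECL_ATTRIBUTES (fndecl)))
       ...

   The name must be given in the canonical 'text' form, without the
   surrounding '__'.  */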
5889
5890 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5891 return a pointer to the first list element whose attribute name
5892 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5893 '__text__'). */
5894
5895 tree
5896 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5897 tree list)
5898 {
5899 while (list)
5900 {
5901 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5902
5903 if (attr_len > ident_len)
5904 {
5905 list = TREE_CHAIN (list);
5906 continue;
5907 }
5908
5909 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5910
5911 if (strncmp (attr_name, p, attr_len) == 0)
5912 break;
5913
5914 /* TODO: If we made sure that attributes were stored in the
5915 canonical form without '__...__' (ie, as in 'text' as opposed
5916 to '__text__') then we could avoid the following case. */
5917 if (p[0] == '_' && p[1] == '_'
5918     && strncmp (attr_name, p + 2, attr_len) == 0)
5919 break;
5920
5921 list = TREE_CHAIN (list);
5922 }
5923
5924 return list;
5925 }
5926
5927
5928 /* A variant of lookup_attribute() that can be used with an identifier
5929 as the first argument, and where the identifier can be either
5930 'text' or '__text__'.
5931
5932 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5933 return a pointer to the attribute's list element if the attribute
5934 is part of the list, or NULL_TREE if not found. If the attribute
5935 appears more than once, this only returns the first occurrence; the
5936 TREE_CHAIN of the return value should be passed back in if further
5937 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5938 can be in the form 'text' or '__text__'. */
5939 static tree
5940 lookup_ident_attribute (tree attr_identifier, tree list)
5941 {
5942 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5943
5944 while (list)
5945 {
5946 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5947 == IDENTIFIER_NODE);
5948
5949 /* Identifiers can be compared directly for equality. */
5950 if (attr_identifier == get_attribute_name (list))
5951 break;
5952
5953 /* If they are not equal, they may still be one in the form
5954 'text' while the other one is in the form '__text__'. TODO:
5955 If we were storing attributes in normalized 'text' form, then
5956 this could all go away and we could take full advantage of
5957 the fact that we're comparing identifiers. :-) */
5958 {
5959 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5960 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5961
5962 if (ident_len == attr_len + 4)
5963 {
5964 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5965 const char *q = IDENTIFIER_POINTER (attr_identifier);
5966 if (p[0] == '_' && p[1] == '_'
5967 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5968 && strncmp (q, p + 2, attr_len) == 0)
5969 break;
5970 }
5971 else if (ident_len + 4 == attr_len)
5972 {
5973 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5974 const char *q = IDENTIFIER_POINTER (attr_identifier);
5975 if (q[0] == '_' && q[1] == '_'
5976 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5977 && strncmp (q + 2, p, ident_len) == 0)
5978 break;
5979 }
5980 }
5981 list = TREE_CHAIN (list);
5982 }
5983
5984 return list;
5985 }
5986
5987 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5988 modified list. */
5989
5990 tree
5991 remove_attribute (const char *attr_name, tree list)
5992 {
5993 tree *p;
5994 size_t attr_len = strlen (attr_name);
5995
5996 gcc_checking_assert (attr_name[0] != '_');
5997
5998 for (p = &list; *p; )
5999 {
6000 tree l = *p;
6001 /* TODO: If we were storing attributes in normalized form, here
6002 we could use a simple strcmp(). */
6003 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6004 *p = TREE_CHAIN (l);
6005 else
6006 p = &TREE_CHAIN (l);
6007 }
6008
6009 return list;
6010 }
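/* Illustrative sketch, not part of GCC: stripping an attribute from a
   declaration's list.  DECL is a hypothetical declaration; note that the
   name must be given in the canonical 'text' form, as the checking assert
   above enforces.

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("dllimport", DECL_ATTRIBUTES (decl));  */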
6011
6012 /* Return an attribute list that is the union of a1 and a2. */
6013
6014 tree
6015 merge_attributes (tree a1, tree a2)
6016 {
6017 tree attributes;
6018
6019 /* Either one unset? Take the set one. */
6020
6021 if ((attributes = a1) == 0)
6022 attributes = a2;
6023
6024 /* One that completely contains the other? Take it. */
6025
6026 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6027 {
6028 if (attribute_list_contained (a2, a1))
6029 attributes = a2;
6030 else
6031 {
6032 /* Pick the longest list, and hang the other list's entries on it. */
6033
6034 if (list_length (a1) < list_length (a2))
6035 attributes = a2, a2 = a1;
6036
6037 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6038 {
6039 tree a;
6040 for (a = lookup_ident_attribute (get_attribute_name (a2),
6041 attributes);
6042 a != NULL_TREE && !attribute_value_equal (a, a2);
6043 a = lookup_ident_attribute (get_attribute_name (a2),
6044 TREE_CHAIN (a)))
6045 ;
6046 if (a == NULL_TREE)
6047 {
6048 a1 = copy_node (a2);
6049 TREE_CHAIN (a1) = attributes;
6050 attributes = a1;
6051 }
6052 }
6053 }
6054 }
6055 return attributes;
6056 }
6057
6058 /* Given types T1 and T2, merge their attributes and return
6059 the result. */
6060
6061 tree
6062 merge_type_attributes (tree t1, tree t2)
6063 {
6064 return merge_attributes (TYPE_ATTRIBUTES (t1),
6065 TYPE_ATTRIBUTES (t2));
6066 }
6067
6068 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6069 the result. */
6070
6071 tree
6072 merge_decl_attributes (tree olddecl, tree newdecl)
6073 {
6074 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6075 DECL_ATTRIBUTES (newdecl));
6076 }
6077
6078 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6079
6080 /* Specialization of merge_decl_attributes for various Windows targets.
6081
6082 This handles the following situation:
6083
6084 __declspec (dllimport) int foo;
6085 int foo;
6086
6087 The second instance of `foo' nullifies the dllimport. */
6088
6089 tree
6090 merge_dllimport_decl_attributes (tree old, tree new_tree)
6091 {
6092 tree a;
6093 int delete_dllimport_p = 1;
6094
6095 /* What we need to do here is remove dllimport from `old' if it doesn't
6096 appear in `new'. dllimport behaves like extern: if a declaration is
6097 marked dllimport and a definition appears later, then the object
6098 is not dllimport'd. We also remove a `new' dllimport if the old list
6099 contains dllexport: dllexport always overrides dllimport, regardless
6100 of the order of declaration. */
6101 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6102 delete_dllimport_p = 0;
6103 else if (DECL_DLLIMPORT_P (new_tree)
6104 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6105 {
6106 DECL_DLLIMPORT_P (new_tree) = 0;
6107 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6108 "dllimport ignored", new_tree);
6109 }
6110 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6111 {
6112 /* Warn about overriding a symbol that has already been used, e.g.:
6113 extern int __attribute__ ((dllimport)) foo;
6114 int* bar () {return &foo;}
6115 int foo;
6116 */
6117 if (TREE_USED (old))
6118 {
6119 warning (0, "%q+D redeclared without dllimport attribute "
6120 "after being referenced with dll linkage", new_tree);
6121 /* If we have used a variable's address with dllimport linkage,
6122 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6123 decl may already have had TREE_CONSTANT computed.
6124 We still remove the attribute so that assembler code refers
6125 to '&foo' rather than '_imp__foo'. */
6126 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6127 DECL_DLLIMPORT_P (new_tree) = 1;
6128 }
6129
6130 /* Let an inline definition silently override the external reference,
6131 but otherwise warn about attribute inconsistency. */
6132 else if (TREE_CODE (new_tree) == VAR_DECL
6133 || !DECL_DECLARED_INLINE_P (new_tree))
6134 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6135 "previous dllimport ignored", new_tree);
6136 }
6137 else
6138 delete_dllimport_p = 0;
6139
6140 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6141
6142 if (delete_dllimport_p)
6143 a = remove_attribute ("dllimport", a);
6144
6145 return a;
6146 }
6147
6148 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6149 struct attribute_spec.handler. */
6150
6151 tree
6152 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6153 bool *no_add_attrs)
6154 {
6155 tree node = *pnode;
6156 bool is_dllimport;
6157
6158 /* These attributes may apply to structure and union types being created,
6159 but otherwise should pass to the declaration involved. */
6160 if (!DECL_P (node))
6161 {
6162 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6163 | (int) ATTR_FLAG_ARRAY_NEXT))
6164 {
6165 *no_add_attrs = true;
6166 return tree_cons (name, args, NULL_TREE);
6167 }
6168 if (TREE_CODE (node) == RECORD_TYPE
6169 || TREE_CODE (node) == UNION_TYPE)
6170 {
6171 node = TYPE_NAME (node);
6172 if (!node)
6173 return NULL_TREE;
6174 }
6175 else
6176 {
6177 warning (OPT_Wattributes, "%qE attribute ignored",
6178 name);
6179 *no_add_attrs = true;
6180 return NULL_TREE;
6181 }
6182 }
6183
6184 if (TREE_CODE (node) != FUNCTION_DECL
6185 && TREE_CODE (node) != VAR_DECL
6186 && TREE_CODE (node) != TYPE_DECL)
6187 {
6188 *no_add_attrs = true;
6189 warning (OPT_Wattributes, "%qE attribute ignored",
6190 name);
6191 return NULL_TREE;
6192 }
6193
6194 if (TREE_CODE (node) == TYPE_DECL
6195 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6196 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6197 {
6198 *no_add_attrs = true;
6199 warning (OPT_Wattributes, "%qE attribute ignored",
6200 name);
6201 return NULL_TREE;
6202 }
6203
6204 is_dllimport = is_attribute_p ("dllimport", name);
6205
6206 /* Report error on dllimport ambiguities seen now before they cause
6207 any damage. */
6208 if (is_dllimport)
6209 {
6210 /* Honor any target-specific overrides. */
6211 if (!targetm.valid_dllimport_attribute_p (node))
6212 *no_add_attrs = true;
6213
6214 else if (TREE_CODE (node) == FUNCTION_DECL
6215 && DECL_DECLARED_INLINE_P (node))
6216 {
6217 warning (OPT_Wattributes, "inline function %q+D declared as "
6218 " dllimport: attribute ignored", node);
6219 *no_add_attrs = true;
6220 }
6221 /* Like MS, treat definition of dllimported variables and
6222 non-inlined functions on declaration as syntax errors. */
6223 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6224 {
6225 error ("function %q+D definition is marked dllimport", node);
6226 *no_add_attrs = true;
6227 }
6228
6229 else if (TREE_CODE (node) == VAR_DECL)
6230 {
6231 if (DECL_INITIAL (node))
6232 {
6233 error ("variable %q+D definition is marked dllimport",
6234 node);
6235 *no_add_attrs = true;
6236 }
6237
6238 /* `extern' needn't be specified with dllimport.
6239 Specify `extern' now and hope for the best. Sigh. */
6240 DECL_EXTERNAL (node) = 1;
6241 /* Also, implicitly give dllimport'd variables declared within
6242 a function global scope, unless declared static. */
6243 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6244 TREE_PUBLIC (node) = 1;
6245 }
6246
6247 if (*no_add_attrs == false)
6248 DECL_DLLIMPORT_P (node) = 1;
6249 }
6250 else if (TREE_CODE (node) == FUNCTION_DECL
6251 && DECL_DECLARED_INLINE_P (node)
6252 && flag_keep_inline_dllexport)
6253 /* An exported function, even if inline, must be emitted. */
6254 DECL_EXTERNAL (node) = 0;
6255
6256 /* Report error if symbol is not accessible at global scope. */
6257 if (!TREE_PUBLIC (node)
6258 && (TREE_CODE (node) == VAR_DECL
6259 || TREE_CODE (node) == FUNCTION_DECL))
6260 {
6261 error ("external linkage required for symbol %q+D because of "
6262 "%qE attribute", node, name);
6263 *no_add_attrs = true;
6264 }
6265
6266 /* A dllexport'd entity must have default visibility so that other
6267 program units (shared libraries or the main executable) can see
6268 it. A dllimport'd entity must have default visibility so that
6269 the linker knows that undefined references within this program
6270 unit can be resolved by the dynamic linker. */
6271 if (!*no_add_attrs)
6272 {
6273 if (DECL_VISIBILITY_SPECIFIED (node)
6274 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6275 error ("%qE implies default visibility, but %qD has already "
6276 "been declared with a different visibility",
6277 name, node);
6278 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6279 DECL_VISIBILITY_SPECIFIED (node) = 1;
6280 }
6281
6282 return NULL_TREE;
6283 }
6284
6285 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6286 \f
6287 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6288 of the various TYPE_QUAL values. */
6289
6290 static void
6291 set_type_quals (tree type, int type_quals)
6292 {
6293 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6294 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6295 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6296 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6297 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6298 }
6299
6300 /* Returns true iff unqualified CAND and BASE are equivalent. */
6301
6302 bool
6303 check_base_type (const_tree cand, const_tree base)
6304 {
6305 return (TYPE_NAME (cand) == TYPE_NAME (base)
6306 /* Apparently this is needed for Objective-C. */
6307 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6308 /* Check alignment. */
6309 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6310 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6311 TYPE_ATTRIBUTES (base)));
6312 }
6313
6314 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6315
6316 bool
6317 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6318 {
6319 return (TYPE_QUALS (cand) == type_quals
6320 && check_base_type (cand, base));
6321 }
6322
6323 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6324
6325 static bool
6326 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6327 {
6328 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6329 && TYPE_NAME (cand) == TYPE_NAME (base)
6330 /* Apparently this is needed for Objective-C. */
6331 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6332 /* Check alignment. */
6333 && TYPE_ALIGN (cand) == align
6334 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6335 TYPE_ATTRIBUTES (base)));
6336 }
6337
6338 /* This function checks to see if TYPE matches the size of one of the
6339 built-in atomic types, and returns that core atomic type. */
6340
6341 static tree
6342 find_atomic_core_type (tree type)
6343 {
6344 tree base_atomic_type;
6345
6346 /* Only handle complete types. */
6347 if (TYPE_SIZE (type) == NULL_TREE)
6348 return NULL_TREE;
6349
6350 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6351 switch (type_size)
6352 {
6353 case 8:
6354 base_atomic_type = atomicQI_type_node;
6355 break;
6356
6357 case 16:
6358 base_atomic_type = atomicHI_type_node;
6359 break;
6360
6361 case 32:
6362 base_atomic_type = atomicSI_type_node;
6363 break;
6364
6365 case 64:
6366 base_atomic_type = atomicDI_type_node;
6367 break;
6368
6369 case 128:
6370 base_atomic_type = atomicTI_type_node;
6371 break;
6372
6373 default:
6374 base_atomic_type = NULL_TREE;
6375 }
6376
6377 return base_atomic_type;
6378 }
6379
6380 /* Return a version of the TYPE, qualified as indicated by the
6381 TYPE_QUALS, if one exists. If no qualified version exists yet,
6382 return NULL_TREE. */
6383
6384 tree
6385 get_qualified_type (tree type, int type_quals)
6386 {
6387 tree t;
6388
6389 if (TYPE_QUALS (type) == type_quals)
6390 return type;
6391
6392 /* Search the chain of variants to see if there is already one there just
6393 like the one we need to have. If so, use that existing one. We must
6394 preserve the TYPE_NAME, since there is code that depends on this. */
6395 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6396 if (check_qualified_type (t, type, type_quals))
6397 return t;
6398
6399 return NULL_TREE;
6400 }
6401
6402 /* Like get_qualified_type, but creates the type if it does not
6403 exist. This function never returns NULL_TREE. */
6404
6405 tree
6406 build_qualified_type (tree type, int type_quals)
6407 {
6408 tree t;
6409
6410 /* See if we already have the appropriate qualified variant. */
6411 t = get_qualified_type (type, type_quals);
6412
6413 /* If not, build it. */
6414 if (!t)
6415 {
6416 t = build_variant_type_copy (type);
6417 set_type_quals (t, type_quals);
6418
6419 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6420 {
6421 /* See if this object can map to a basic atomic type. */
6422 tree atomic_type = find_atomic_core_type (type);
6423 if (atomic_type)
6424 {
6425 /* Ensure the alignment of this type is compatible with
6426 the required alignment of the atomic type. */
6427 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6428 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6429 }
6430 }
6431
6432 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6433 /* Propagate structural equality. */
6434 SET_TYPE_STRUCTURAL_EQUALITY (t);
6435 else if (TYPE_CANONICAL (type) != type)
6436 /* Build the underlying canonical type, since it is different
6437 from TYPE. */
6438 {
6439 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6440 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6441 }
6442 else
6443 /* T is its own canonical type. */
6444 TYPE_CANONICAL (t) = t;
6445
6446 }
6447
6448 return t;
6449 }
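/* Illustrative sketch, not part of GCC: requesting a qualified variant.
   Either an existing variant from TYPE's variant chain is returned or a
   new one is built and chained, so a repeated request yields the same
   node.

     tree cst = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     tree again = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     gcc_checking_assert (cst == again);  */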
6450
6451 /* Create a variant of type T with alignment ALIGN. */
6452
6453 tree
6454 build_aligned_type (tree type, unsigned int align)
6455 {
6456 tree t;
6457
6458 if (TYPE_PACKED (type)
6459 || TYPE_ALIGN (type) == align)
6460 return type;
6461
6462 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6463 if (check_aligned_type (t, type, align))
6464 return t;
6465
6466 t = build_variant_type_copy (type);
6467 TYPE_ALIGN (t) = align;
6468
6469 return t;
6470 }
6471
6472 /* Create a new distinct copy of TYPE. The new type is made its own
6473 MAIN_VARIANT. If TYPE requires structural equality checks, the
6474 resulting type requires structural equality checks; otherwise, its
6475 TYPE_CANONICAL points to itself. */
6476
6477 tree
6478 build_distinct_type_copy (tree type)
6479 {
6480 tree t = copy_node (type);
6481
6482 TYPE_POINTER_TO (t) = 0;
6483 TYPE_REFERENCE_TO (t) = 0;
6484
6485 /* Set the canonical type either to a new equivalence class, or
6486 propagate the need for structural equality checks. */
6487 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6488 SET_TYPE_STRUCTURAL_EQUALITY (t);
6489 else
6490 TYPE_CANONICAL (t) = t;
6491
6492 /* Make it its own variant. */
6493 TYPE_MAIN_VARIANT (t) = t;
6494 TYPE_NEXT_VARIANT (t) = 0;
6495
6496 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6497 whose TREE_TYPE is not t. This can also happen in the Ada
6498 frontend when using subtypes. */
6499
6500 return t;
6501 }
6502
6503 /* Create a new variant of TYPE, equivalent but distinct. This is so
6504 the caller can modify it. TYPE_CANONICAL for the return type will
6505 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6506 are considered equal by the language itself (or that both types
6507 require structural equality checks). */
6508
6509 tree
6510 build_variant_type_copy (tree type)
6511 {
6512 tree t, m = TYPE_MAIN_VARIANT (type);
6513
6514 t = build_distinct_type_copy (type);
6515
6516 /* Since we're building a variant, assume that it is a non-semantic
6517 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6518 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6519
6520 /* Add the new type to the chain of variants of TYPE. */
6521 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6522 TYPE_NEXT_VARIANT (m) = t;
6523 TYPE_MAIN_VARIANT (t) = m;
6524
6525 return t;
6526 }
6527 \f
6528 /* Return true if the FROM trees in both tree maps are equal. */
6529
6530 int
6531 tree_map_base_eq (const void *va, const void *vb)
6532 {
6533 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6534 *const b = (const struct tree_map_base *) vb;
6535 return (a->from == b->from);
6536 }
6537
6538 /* Hash a FROM tree in a tree_map_base. */
6539
6540 unsigned int
6541 tree_map_base_hash (const void *item)
6542 {
6543 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6544 }
6545
6546 /* Return true if this tree map structure is marked for garbage collection
6547 purposes. We simply return true if the from tree is marked, so that this
6548 structure goes away when the from tree goes away. */
6549
6550 int
6551 tree_map_base_marked_p (const void *p)
6552 {
6553 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6554 }
6555
6556 /* Hash a from tree in a tree_map. */
6557
6558 unsigned int
6559 tree_map_hash (const void *item)
6560 {
6561 return (((const struct tree_map *) item)->hash);
6562 }
6563
6564 /* Hash a from tree in a tree_decl_map. */
6565
6566 unsigned int
6567 tree_decl_map_hash (const void *item)
6568 {
6569 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6570 }
6571
6572 /* Return the initialization priority for DECL. */
6573
6574 priority_type
6575 decl_init_priority_lookup (tree decl)
6576 {
6577 symtab_node *snode = symtab_node::get (decl);
6578
6579 if (!snode)
6580 return DEFAULT_INIT_PRIORITY;
6581 return snode->get_init_priority ();
6583 }
6584
6585 /* Return the finalization priority for DECL. */
6586
6587 priority_type
6588 decl_fini_priority_lookup (tree decl)
6589 {
6590 cgraph_node *node = cgraph_node::get (decl);
6591
6592 if (!node)
6593 return DEFAULT_INIT_PRIORITY;
6594 return node->get_fini_priority ();
6596 }
6597
6598 /* Set the initialization priority for DECL to PRIORITY. */
6599
6600 void
6601 decl_init_priority_insert (tree decl, priority_type priority)
6602 {
6603 struct symtab_node *snode;
6604
6605 if (priority == DEFAULT_INIT_PRIORITY)
6606 {
6607 snode = symtab_node::get (decl);
6608 if (!snode)
6609 return;
6610 }
6611 else if (TREE_CODE (decl) == VAR_DECL)
6612 snode = varpool_node::get_create (decl);
6613 else
6614 snode = cgraph_node::get_create (decl);
6615 snode->set_init_priority (priority);
6616 }
6617
6618 /* Set the finalization priority for DECL to PRIORITY. */
6619
6620 void
6621 decl_fini_priority_insert (tree decl, priority_type priority)
6622 {
6623 struct cgraph_node *node;
6624
6625 if (priority == DEFAULT_INIT_PRIORITY)
6626 {
6627 node = cgraph_node::get (decl);
6628 if (!node)
6629 return;
6630 }
6631 else
6632 node = cgraph_node::get_create (decl);
6633 node->set_fini_priority (priority);
6634 }
6635
6636 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6637
6638 static void
6639 print_debug_expr_statistics (void)
6640 {
6641 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6642 (long) debug_expr_for_decl->size (),
6643 (long) debug_expr_for_decl->elements (),
6644 debug_expr_for_decl->collisions ());
6645 }
6646
6647 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6648
6649 static void
6650 print_value_expr_statistics (void)
6651 {
6652 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6653 (long) value_expr_for_decl->size (),
6654 (long) value_expr_for_decl->elements (),
6655 value_expr_for_decl->collisions ());
6656 }
6657
6658 /* Lookup a debug expression for FROM, and return it if we find one. */
6659
6660 tree
6661 decl_debug_expr_lookup (tree from)
6662 {
6663 struct tree_decl_map *h, in;
6664 in.base.from = from;
6665
6666 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6667 if (h)
6668 return h->to;
6669 return NULL_TREE;
6670 }
6671
6672 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6673
6674 void
6675 decl_debug_expr_insert (tree from, tree to)
6676 {
6677 struct tree_decl_map *h;
6678
6679 h = ggc_alloc<tree_decl_map> ();
6680 h->base.from = from;
6681 h->to = to;
6682 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6683 }
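/* Illustrative sketch, not part of GCC: the lookup/insert pair above is
   keyed by DECL_UID, so a mapping stored for a decl is found again through
   the same decl.  VAR and EXPR are hypothetical trees.

     decl_debug_expr_insert (var, expr);
     gcc_checking_assert (decl_debug_expr_lookup (var) == expr);  */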
6684
6685 /* Lookup a value expression for FROM, and return it if we find one. */
6686
6687 tree
6688 decl_value_expr_lookup (tree from)
6689 {
6690 struct tree_decl_map *h, in;
6691 in.base.from = from;
6692
6693 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6694 if (h)
6695 return h->to;
6696 return NULL_TREE;
6697 }
6698
6699 /* Insert a mapping FROM->TO in the value expression hashtable. */
6700
6701 void
6702 decl_value_expr_insert (tree from, tree to)
6703 {
6704 struct tree_decl_map *h;
6705
6706 h = ggc_alloc<tree_decl_map> ();
6707 h->base.from = from;
6708 h->to = to;
6709 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6710 }
6711
6712 /* Lookup a vector of debug arguments for FROM, and return it if we
6713 find one. */
6714
6715 vec<tree, va_gc> **
6716 decl_debug_args_lookup (tree from)
6717 {
6718 struct tree_vec_map *h, in;
6719
6720 if (!DECL_HAS_DEBUG_ARGS_P (from))
6721 return NULL;
6722 gcc_checking_assert (debug_args_for_decl != NULL);
6723 in.base.from = from;
6724 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6725 if (h)
6726 return &h->to;
6727 return NULL;
6728 }
6729
6730 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6731 arguments hashtable. */
6732
6733 vec<tree, va_gc> **
6734 decl_debug_args_insert (tree from)
6735 {
6736 struct tree_vec_map *h;
6737 tree_vec_map **loc;
6738
6739 if (DECL_HAS_DEBUG_ARGS_P (from))
6740 return decl_debug_args_lookup (from);
6741 if (debug_args_for_decl == NULL)
6742 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6743 h = ggc_alloc<tree_vec_map> ();
6744 h->base.from = from;
6745 h->to = NULL;
6746 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6747 *loc = h;
6748 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6749 return &h->to;
6750 }
6751
6752 /* Hashing of types so that we don't make duplicates.
6753 The entry point is `type_hash_canon'. */
6754
6755 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6756 with types in the TREE_VALUE slots), by adding the hash codes
6757 of the individual types. */
6758
6759 static void
6760 type_hash_list (const_tree list, inchash::hash &hstate)
6761 {
6762 const_tree tail;
6763
6764 for (tail = list; tail; tail = TREE_CHAIN (tail))
6765 if (TREE_VALUE (tail) != error_mark_node)
6766 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6767 }
6768
6769 /* These are the Hashtable callback functions. */
6770
6771 /* Returns true iff the types are equivalent. */
6772
6773 bool
6774 type_cache_hasher::equal (type_hash *a, type_hash *b)
6775 {
6776 /* First test the things that are the same for all types. */
6777 if (a->hash != b->hash
6778 || TREE_CODE (a->type) != TREE_CODE (b->type)
6779 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6780 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6781 TYPE_ATTRIBUTES (b->type))
6782 || (TREE_CODE (a->type) != COMPLEX_TYPE
6783 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6784 return 0;
6785
6786 /* Be careful about comparing arrays before and after the element type
6787 has been completed; don't compare TYPE_ALIGN unless both types are
6788 complete. */
6789 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6790 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6791 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6792 return 0;
6793
6794 switch (TREE_CODE (a->type))
6795 {
6796 case VOID_TYPE:
6797 case COMPLEX_TYPE:
6798 case POINTER_TYPE:
6799 case REFERENCE_TYPE:
6800 case NULLPTR_TYPE:
6801 return 1;
6802
6803 case VECTOR_TYPE:
6804 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6805
6806 case ENUMERAL_TYPE:
6807 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6808 && !(TYPE_VALUES (a->type)
6809 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6810 && TYPE_VALUES (b->type)
6811 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6812 && type_list_equal (TYPE_VALUES (a->type),
6813 TYPE_VALUES (b->type))))
6814 return 0;
6815
6816 /* ... fall through ... */
6817
6818 case INTEGER_TYPE:
6819 case REAL_TYPE:
6820 case BOOLEAN_TYPE:
6821 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6822 return false;
6823 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6824 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6825 TYPE_MAX_VALUE (b->type)))
6826 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6827 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6828 TYPE_MIN_VALUE (b->type))));
6829
6830 case FIXED_POINT_TYPE:
6831 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6832
6833 case OFFSET_TYPE:
6834 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6835
6836 case METHOD_TYPE:
6837 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6838 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6839 || (TYPE_ARG_TYPES (a->type)
6840 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6841 && TYPE_ARG_TYPES (b->type)
6842 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6843 && type_list_equal (TYPE_ARG_TYPES (a->type),
6844 TYPE_ARG_TYPES (b->type)))))
6845 break;
6846 return 0;
6847 case ARRAY_TYPE:
6848 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6849
6850 case RECORD_TYPE:
6851 case UNION_TYPE:
6852 case QUAL_UNION_TYPE:
6853 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6854 || (TYPE_FIELDS (a->type)
6855 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6856 && TYPE_FIELDS (b->type)
6857 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6858 && type_list_equal (TYPE_FIELDS (a->type),
6859 TYPE_FIELDS (b->type))));
6860
6861 case FUNCTION_TYPE:
6862 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6863 || (TYPE_ARG_TYPES (a->type)
6864 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6865 && TYPE_ARG_TYPES (b->type)
6866 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6867 && type_list_equal (TYPE_ARG_TYPES (a->type),
6868 TYPE_ARG_TYPES (b->type))))
6869 break;
6870 return 0;
6871
6872 default:
6873 return 0;
6874 }
6875
6876 if (lang_hooks.types.type_hash_eq != NULL)
6877 return lang_hooks.types.type_hash_eq (a->type, b->type);
6878
6879 return 1;
6880 }
6881
6882 /* Given TYPE, and HASHCODE its hash code, return the canonical
6883 object for an identical type if one already exists.
6884 Otherwise, return TYPE, and record it as the canonical object.
6885
6886 To use this function, first create a type of the sort you want.
6887 Then compute its hash code from the fields of the type that
6888 make it different from other similar types.
6889 Then call this function and use the value. */
6890
6891 tree
6892 type_hash_canon (unsigned int hashcode, tree type)
6893 {
6894 type_hash in;
6895 type_hash **loc;
6896
6897 /* The hash table only contains main variants, so ensure that's what we're
6898 being passed. */
6899 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6900
6901 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6902 must call that routine before comparing TYPE_ALIGNs. */
6903 layout_type (type);
6904
6905 in.hash = hashcode;
6906 in.type = type;
6907
6908 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6909 if (*loc)
6910 {
6911 tree t1 = ((type_hash *) *loc)->type;
6912 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6913 if (GATHER_STATISTICS)
6914 {
6915 tree_code_counts[(int) TREE_CODE (type)]--;
6916 tree_node_counts[(int) t_kind]--;
6917 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6918 }
6919 return t1;
6920 }
6921 else
6922 {
6923 struct type_hash *h;
6924
6925 h = ggc_alloc<type_hash> ();
6926 h->hash = hashcode;
6927 h->type = type;
6928 *loc = h;
6929
6930 return type;
6931 }
6932 }
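/* Illustrative sketch, not part of GCC, of the recipe described above
   type_hash_canon: build a candidate main variant, hash the fields that
   distinguish it, then canonicalize.  This mirrors what
   build_nonstandard_integer_type does further down in this file.

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 24;
     fixup_unsigned_type (t);
     t = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (t)), t);  */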
6933
6934 static void
6935 print_type_hash_statistics (void)
6936 {
6937 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6938 (long) type_hash_table->size (),
6939 (long) type_hash_table->elements (),
6940 type_hash_table->collisions ());
6941 }
6942
6943 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6944 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6945 by adding the hash codes of the individual attributes. */
6946
6947 static void
6948 attribute_hash_list (const_tree list, inchash::hash &hstate)
6949 {
6950 const_tree tail;
6951
6952 for (tail = list; tail; tail = TREE_CHAIN (tail))
6953 /* ??? Do we want to add in TREE_VALUE too? */
6954 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6955 }
6956
6957 /* Given two lists of attributes, return true if list L2 is
6958 equivalent to L1. */
6959
6960 int
6961 attribute_list_equal (const_tree l1, const_tree l2)
6962 {
6963 if (l1 == l2)
6964 return 1;
6965
6966 return attribute_list_contained (l1, l2)
6967 && attribute_list_contained (l2, l1);
6968 }
6969
6970 /* Given two lists of attributes, return true if list L2 is
6971 completely contained within L1. */
6972 /* ??? This would be faster if attribute names were stored in a canonicalized
6973 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6974 must be used to show these elements are equivalent (which they are). */
6975 /* ??? It's not clear that attributes with arguments will always be handled
6976 correctly. */
6977
6978 int
6979 attribute_list_contained (const_tree l1, const_tree l2)
6980 {
6981 const_tree t1, t2;
6982
6983 /* First check the obvious, maybe the lists are identical. */
6984 if (l1 == l2)
6985 return 1;
6986
6987 /* Maybe the lists are similar. */
6988 for (t1 = l1, t2 = l2;
6989 t1 != 0 && t2 != 0
6990 && get_attribute_name (t1) == get_attribute_name (t2)
6991 && TREE_VALUE (t1) == TREE_VALUE (t2);
6992 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6993 ;
6994
6995 /* Maybe the lists are equal. */
6996 if (t1 == 0 && t2 == 0)
6997 return 1;
6998
6999 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7000 {
7001 const_tree attr;
7002 /* This CONST_CAST is okay because lookup_attribute does not
7003 modify its argument and the return value is assigned to a
7004 const_tree. */
7005 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7006 CONST_CAST_TREE (l1));
7007 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7008 attr = lookup_ident_attribute (get_attribute_name (t2),
7009 TREE_CHAIN (attr)))
7010 ;
7011
7012 if (attr == NULL_TREE)
7013 return 0;
7014 }
7015
7016 return 1;
7017 }
7018
7019 /* Given two lists of types
7020 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7021 return 1 if the lists contain the same types in the same order.
7022 Also, the TREE_PURPOSEs must match. */
7023
7024 int
7025 type_list_equal (const_tree l1, const_tree l2)
7026 {
7027 const_tree t1, t2;
7028
7029 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7030 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7031 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7032 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7033 && (TREE_TYPE (TREE_PURPOSE (t1))
7034 == TREE_TYPE (TREE_PURPOSE (t2))))))
7035 return 0;
7036
7037 return t1 == t2;
7038 }
7039
7040 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7041 given by TYPE. If the argument list accepts variable arguments,
7042 then this function counts only the ordinary arguments. */
7043
7044 int
7045 type_num_arguments (const_tree type)
7046 {
7047 int i = 0;
7048 tree t;
7049
7050 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7051 /* If the function does not take a variable number of arguments,
7052 the last element in the list will have type `void'. */
7053 if (VOID_TYPE_P (TREE_VALUE (t)))
7054 break;
7055 else
7056 ++i;
7057
7058 return i;
7059 }
7060
7061 /* Nonzero if integer constants T1 and T2
7062 represent the same constant value. */
7063
7064 int
7065 tree_int_cst_equal (const_tree t1, const_tree t2)
7066 {
7067 if (t1 == t2)
7068 return 1;
7069
7070 if (t1 == 0 || t2 == 0)
7071 return 0;
7072
7073 if (TREE_CODE (t1) == INTEGER_CST
7074 && TREE_CODE (t2) == INTEGER_CST
7075 && wi::to_widest (t1) == wi::to_widest (t2))
7076 return 1;
7077
7078 return 0;
7079 }
7080
7081 /* Return true if T is an INTEGER_CST whose numerical value (extended
7082 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7083
7084 bool
7085 tree_fits_shwi_p (const_tree t)
7086 {
7087 return (t != NULL_TREE
7088 && TREE_CODE (t) == INTEGER_CST
7089 && wi::fits_shwi_p (wi::to_widest (t)));
7090 }
7091
7092 /* Return true if T is an INTEGER_CST whose numerical value (extended
7093 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7094
7095 bool
7096 tree_fits_uhwi_p (const_tree t)
7097 {
7098 return (t != NULL_TREE
7099 && TREE_CODE (t) == INTEGER_CST
7100 && wi::fits_uhwi_p (wi::to_widest (t)));
7101 }
7102
7103 /* T is an INTEGER_CST whose numerical value (extended according to
7104 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7105 HOST_WIDE_INT. */
7106
7107 HOST_WIDE_INT
7108 tree_to_shwi (const_tree t)
7109 {
7110 gcc_assert (tree_fits_shwi_p (t));
7111 return TREE_INT_CST_LOW (t);
7112 }
7113
7114 /* T is an INTEGER_CST whose numerical value (extended according to
7115 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7116 HOST_WIDE_INT. */
7117
7118 unsigned HOST_WIDE_INT
7119 tree_to_uhwi (const_tree t)
7120 {
7121 gcc_assert (tree_fits_uhwi_p (t));
7122 return TREE_INT_CST_LOW (t);
7123 }
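/* Illustrative sketch, not part of GCC: the fits/convert pattern the two
   accessors above are meant for.  SIZE is a hypothetical INTEGER_CST and
   BYTES a hypothetical unsigned HOST_WIDE_INT.

     if (tree_fits_uhwi_p (size))
       bytes = tree_to_uhwi (size);  */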
7124
7125 /* Return the most significant (sign) bit of T. */
7126
7127 int
7128 tree_int_cst_sign_bit (const_tree t)
7129 {
7130 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7131
7132 return wi::extract_uhwi (t, bitno, 1);
7133 }
7134
7135 /* Return an indication of the sign of the integer constant T.
7136 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7137 Note that -1 will never be returned if T's type is unsigned. */
7138
7139 int
7140 tree_int_cst_sgn (const_tree t)
7141 {
7142 if (wi::eq_p (t, 0))
7143 return 0;
7144 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7145 return 1;
7146 else if (wi::neg_p (t))
7147 return -1;
7148 else
7149 return 1;
7150 }
7151
7152 /* Return the minimum number of bits needed to represent VALUE in a
7153 signed or unsigned type; SGN says which. */
7154
7155 unsigned int
7156 tree_int_cst_min_precision (tree value, signop sgn)
7157 {
7158 /* If the value is negative, compute its negative minus 1. The latter
7159 adjustment is because the absolute value of the largest negative value
7160 is one larger than the largest positive value. This is equivalent to
7161 a bit-wise negation, so use that operation instead. */
7162
7163 if (tree_int_cst_sgn (value) < 0)
7164 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7165
7166 /* Return the number of bits needed, taking into account the fact
7167 that we need one more bit for a signed than unsigned type.
7168 If value is 0 or -1, the minimum precision is 1 no matter
7169 whether SGN is SIGNED or UNSIGNED. */
7170
7171 if (integer_zerop (value))
7172 return 1;
7173 else
7174 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7175 }
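/* Worked example, illustrative only: for VALUE == 5 the function returns
   tree_floor_log2 (5) + 1 == 3 bits when SGN is UNSIGNED, and one more,
   4 bits, when SGN is SIGNED.  For VALUE == -3 the bit-wise negation maps
   it to 2 first, giving tree_floor_log2 (2) + 1 + 1 == 3 signed bits.  */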
7176
7177 /* Return truthvalue of whether T1 is the same tree structure as T2.
7178 Return 1 if they are the same.
7179 Return 0 if they are understandably different.
7180 Return -1 if either contains tree structure not understood by
7181 this function. */
7182
7183 int
7184 simple_cst_equal (const_tree t1, const_tree t2)
7185 {
7186 enum tree_code code1, code2;
7187 int cmp;
7188 int i;
7189
7190 if (t1 == t2)
7191 return 1;
7192 if (t1 == 0 || t2 == 0)
7193 return 0;
7194
7195 code1 = TREE_CODE (t1);
7196 code2 = TREE_CODE (t2);
7197
7198 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7199 {
7200 if (CONVERT_EXPR_CODE_P (code2)
7201 || code2 == NON_LVALUE_EXPR)
7202 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7203 else
7204 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7205 }
7206
7207 else if (CONVERT_EXPR_CODE_P (code2)
7208 || code2 == NON_LVALUE_EXPR)
7209 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7210
7211 if (code1 != code2)
7212 return 0;
7213
7214 switch (code1)
7215 {
7216 case INTEGER_CST:
7217 return wi::to_widest (t1) == wi::to_widest (t2);
7218
7219 case REAL_CST:
7220 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7221
7222 case FIXED_CST:
7223 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7224
7225 case STRING_CST:
7226 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7227 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7228 TREE_STRING_LENGTH (t1)));
7229
7230 case CONSTRUCTOR:
7231 {
7232 unsigned HOST_WIDE_INT idx;
7233 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7234 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7235
7236 if (vec_safe_length (v1) != vec_safe_length (v2))
7237 return false;
7238
7239 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7240 /* ??? Should we also handle fields here? */
7241 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7242 return false;
7243 return true;
7244 }
7245
7246 case SAVE_EXPR:
7247 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7248
7249 case CALL_EXPR:
7250 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7251 if (cmp <= 0)
7252 return cmp;
7253 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7254 return 0;
7255 {
7256 const_tree arg1, arg2;
7257 const_call_expr_arg_iterator iter1, iter2;
7258 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7259 arg2 = first_const_call_expr_arg (t2, &iter2);
7260 arg1 && arg2;
7261 arg1 = next_const_call_expr_arg (&iter1),
7262 arg2 = next_const_call_expr_arg (&iter2))
7263 {
7264 cmp = simple_cst_equal (arg1, arg2);
7265 if (cmp <= 0)
7266 return cmp;
7267 }
7268 return arg1 == arg2;
7269 }
7270
7271 case TARGET_EXPR:
7272 /* Special case: if either target is an unallocated VAR_DECL,
7273 it means that it's going to be unified with whatever the
7274 TARGET_EXPR is really supposed to initialize, so treat it
7275 as being equivalent to anything. */
7276 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7277 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7278 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7279 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7280 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7281 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7282 cmp = 1;
7283 else
7284 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7285
7286 if (cmp <= 0)
7287 return cmp;
7288
7289 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7290
7291 case WITH_CLEANUP_EXPR:
7292 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7293 if (cmp <= 0)
7294 return cmp;
7295
7296 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7297
7298 case COMPONENT_REF:
7299 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7300 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7301
7302 return 0;
7303
7304 case VAR_DECL:
7305 case PARM_DECL:
7306 case CONST_DECL:
7307 case FUNCTION_DECL:
7308 return 0;
7309
7310 default:
7311 break;
7312 }
7313
7314 /* This general rule works for most tree codes. All exceptions should be
7315 handled above. If this is a language-specific tree code, we can't
7316 trust what might be in the operand, so say we don't know
7317 the situation. */
7318 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7319 return -1;
7320
7321 switch (TREE_CODE_CLASS (code1))
7322 {
7323 case tcc_unary:
7324 case tcc_binary:
7325 case tcc_comparison:
7326 case tcc_expression:
7327 case tcc_reference:
7328 case tcc_statement:
7329 cmp = 1;
7330 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7331 {
7332 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7333 if (cmp <= 0)
7334 return cmp;
7335 }
7336
7337 return cmp;
7338
7339 default:
7340 return -1;
7341 }
7342 }
7343
7344 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7345 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7346 than U, respectively. */
7347
7348 int
7349 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7350 {
7351 if (tree_int_cst_sgn (t) < 0)
7352 return -1;
7353 else if (!tree_fits_uhwi_p (t))
7354 return 1;
7355 else if (TREE_INT_CST_LOW (t) == u)
7356 return 0;
7357 else if (TREE_INT_CST_LOW (t) < u)
7358 return -1;
7359 else
7360 return 1;
7361 }
7362
7363 /* Return true if SIZE represents a constant size that is in bounds of
7364 what the middle-end and the backend accept (covering not more than
7365 half of the address-space). */
7366
7367 bool
7368 valid_constant_size_p (const_tree size)
7369 {
7370 if (! tree_fits_uhwi_p (size)
7371 || TREE_OVERFLOW (size)
7372 || tree_int_cst_sign_bit (size) != 0)
7373 return false;
7374 return true;
7375 }
7376
7377 /* Return the precision of the type, or for a complex or vector type the
7378 precision of the type of its elements. */
7379
7380 unsigned int
7381 element_precision (const_tree type)
7382 {
7383 enum tree_code code = TREE_CODE (type);
7384 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7385 type = TREE_TYPE (type);
7386
7387 return TYPE_PRECISION (type);
7388 }
7389
7390 /* Return true if CODE represents an associative tree code. Otherwise
7391 return false. */
7392 bool
7393 associative_tree_code (enum tree_code code)
7394 {
7395 switch (code)
7396 {
7397 case BIT_IOR_EXPR:
7398 case BIT_AND_EXPR:
7399 case BIT_XOR_EXPR:
7400 case PLUS_EXPR:
7401 case MULT_EXPR:
7402 case MIN_EXPR:
7403 case MAX_EXPR:
7404 return true;
7405
7406 default:
7407 break;
7408 }
7409 return false;
7410 }
7411
7412 /* Return true if CODE represents a commutative tree code. Otherwise
7413 return false. */
7414 bool
7415 commutative_tree_code (enum tree_code code)
7416 {
7417 switch (code)
7418 {
7419 case PLUS_EXPR:
7420 case MULT_EXPR:
7421 case MULT_HIGHPART_EXPR:
7422 case MIN_EXPR:
7423 case MAX_EXPR:
7424 case BIT_IOR_EXPR:
7425 case BIT_XOR_EXPR:
7426 case BIT_AND_EXPR:
7427 case NE_EXPR:
7428 case EQ_EXPR:
7429 case UNORDERED_EXPR:
7430 case ORDERED_EXPR:
7431 case UNEQ_EXPR:
7432 case LTGT_EXPR:
7433 case TRUTH_AND_EXPR:
7434 case TRUTH_XOR_EXPR:
7435 case TRUTH_OR_EXPR:
7436 case WIDEN_MULT_EXPR:
7437 case VEC_WIDEN_MULT_HI_EXPR:
7438 case VEC_WIDEN_MULT_LO_EXPR:
7439 case VEC_WIDEN_MULT_EVEN_EXPR:
7440 case VEC_WIDEN_MULT_ODD_EXPR:
7441 return true;
7442
7443 default:
7444 break;
7445 }
7446 return false;
7447 }
7448
7449 /* Return true if CODE represents a ternary tree code for which the
7450 first two operands are commutative. Otherwise return false. */
7451 bool
7452 commutative_ternary_tree_code (enum tree_code code)
7453 {
7454 switch (code)
7455 {
7456 case WIDEN_MULT_PLUS_EXPR:
7457 case WIDEN_MULT_MINUS_EXPR:
7458 case DOT_PROD_EXPR:
7459 case FMA_EXPR:
7460 return true;
7461
7462 default:
7463 break;
7464 }
7465 return false;
7466 }
7467
7468 namespace inchash
7469 {
7470
7471 /* Generate a hash value for an expression. This can be used iteratively
7472 by passing a previous result as the HSTATE argument.
7473
7474 This function is intended to produce the same hash for expressions which
7475 would compare equal using operand_equal_p. */
7476 void
7477 add_expr (const_tree t, inchash::hash &hstate)
7478 {
7479 int i;
7480 enum tree_code code;
7481 enum tree_code_class tclass;
7482
7483 if (t == NULL_TREE)
7484 {
7485 hstate.merge_hash (0);
7486 return;
7487 }
7488
7489 code = TREE_CODE (t);
7490
7491 switch (code)
7492 {
7493 /* Alas, constants aren't shared, so we can't rely on pointer
7494 identity. */
7495 case VOID_CST:
7496 hstate.merge_hash (0);
7497 return;
7498 case INTEGER_CST:
7499 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7500 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7501 return;
7502 case REAL_CST:
7503 {
7504 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7505 hstate.merge_hash (val2);
7506 return;
7507 }
7508 case FIXED_CST:
7509 {
7510 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7511 hstate.merge_hash (val2);
7512 return;
7513 }
7514 case STRING_CST:
7515 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7516 return;
7517 case COMPLEX_CST:
7518 inchash::add_expr (TREE_REALPART (t), hstate);
7519 inchash::add_expr (TREE_IMAGPART (t), hstate);
7520 return;
7521 case VECTOR_CST:
7522 {
7523 unsigned i;
7524 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7525 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7526 return;
7527 }
7528 case SSA_NAME:
7529 /* We can just compare by pointer. */
7530 hstate.add_wide_int (SSA_NAME_VERSION (t));
7531 return;
7532 case PLACEHOLDER_EXPR:
7533 /* The node itself doesn't matter. */
7534 return;
7535 case TREE_LIST:
7536 /* A list of expressions, for a CALL_EXPR or as the elements of a
7537 VECTOR_CST. */
7538 for (; t; t = TREE_CHAIN (t))
7539 inchash::add_expr (TREE_VALUE (t), hstate);
7540 return;
7541 case CONSTRUCTOR:
7542 {
7543 unsigned HOST_WIDE_INT idx;
7544 tree field, value;
7545 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7546 {
7547 inchash::add_expr (field, hstate);
7548 inchash::add_expr (value, hstate);
7549 }
7550 return;
7551 }
7552 case FUNCTION_DECL:
7553 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7554 Otherwise nodes that compare equal according to operand_equal_p might
7555 get different hash codes. However, don't do this for machine specific
7556 or front end builtins, since the function code is overloaded in those
7557 cases. */
7558 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7559 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7560 {
7561 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7562 code = TREE_CODE (t);
7563 }
7564 /* FALL THROUGH */
7565 default:
7566 tclass = TREE_CODE_CLASS (code);
7567
7568 if (tclass == tcc_declaration)
7569 {
7570 /* DECLs have a unique ID. */
7571 hstate.add_wide_int (DECL_UID (t));
7572 }
7573 else
7574 {
7575 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7576
7577 hstate.add_object (code);
7578
7579 /* Don't hash the type, that can lead to having nodes which
7580 compare equal according to operand_equal_p, but which
7581 have different hash codes. */
7582 if (CONVERT_EXPR_CODE_P (code)
7583 || code == NON_LVALUE_EXPR)
7584 {
7585 /* Make sure to include signedness in the hash computation. */
7586 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7587 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7588 }
7589
7590 else if (commutative_tree_code (code))
7591 {
7592 /* It's a commutative expression. We want to hash it the same
7593 however it appears. We do this by first hashing both operands
7594 and then rehashing based on the order of their independent
7595 hashes. */
7596 inchash::hash one, two;
7597 inchash::add_expr (TREE_OPERAND (t, 0), one);
7598 inchash::add_expr (TREE_OPERAND (t, 1), two);
7599 hstate.add_commutative (one, two);
7600 }
7601 else
7602 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7603 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7604 }
7605 return;
7606 }
7607 }
7608
7609 }
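/* Illustrative sketch, not part of GCC: because commutative codes are
   hashed via hstate.add_commutative over the operands' independent hashes,
   a PLUS_EXPR hashes the same regardless of operand order.  TYPE, A and B
   are hypothetical trees of compatible type.

     inchash::hash h1, h2;
     inchash::add_expr (build2 (PLUS_EXPR, type, a, b), h1);
     inchash::add_expr (build2 (PLUS_EXPR, type, b, a), h2);
     gcc_checking_assert (h1.end () == h2.end ());  */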
7610
7611 /* Constructors for pointer, array and function types.
7612 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7613 constructed by language-dependent code, not here.) */
7614
7615 /* Construct, lay out and return the type of pointers to TO_TYPE with
7616 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7617 reference all of memory. If such a type has already been
7618 constructed, reuse it. */
7619
7620 tree
7621 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7622 bool can_alias_all)
7623 {
7624 tree t;
7625
7626 if (to_type == error_mark_node)
7627 return error_mark_node;
7628
7629 /* If the pointed-to type has the may_alias attribute set, force
7630 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7631 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7632 can_alias_all = true;
7633
7634 /* In some cases, languages will have things that aren't a POINTER_TYPE
7635 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7636 In that case, return that type without regard to the rest of our
7637 operands.
7638
7639 ??? This is a kludge, but consistent with the way this function has
7640 always operated and there doesn't seem to be a good way to avoid this
7641 at the moment. */
7642 if (TYPE_POINTER_TO (to_type) != 0
7643 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7644 return TYPE_POINTER_TO (to_type);
7645
7646 /* First, if we already have a type for pointers to TO_TYPE and it's
7647 the proper mode, use it. */
7648 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7649 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7650 return t;
7651
7652 t = make_node (POINTER_TYPE);
7653
7654 TREE_TYPE (t) = to_type;
7655 SET_TYPE_MODE (t, mode);
7656 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7657 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7658 TYPE_POINTER_TO (to_type) = t;
7659
7660 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7661 SET_TYPE_STRUCTURAL_EQUALITY (t);
7662 else if (TYPE_CANONICAL (to_type) != to_type)
7663 TYPE_CANONICAL (t)
7664 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7665 mode, can_alias_all);
7666
7667 /* Lay out the type. This function has many callers that are concerned
7668 with expression-construction, and this simplifies them all. */
7669 layout_type (t);
7670
7671 return t;
7672 }
7673
7674 /* By default build pointers in ptr_mode. */
7675
7676 tree
7677 build_pointer_type (tree to_type)
7678 {
7679 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7680 : TYPE_ADDR_SPACE (to_type);
7681 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7682 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7683 }
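/* Illustrative sketch, not part of GCC: pointer types are cached on the
   pointed-to type via TYPE_POINTER_TO, so repeated requests for the same
   mode hand back the same node.

     tree p1 = build_pointer_type (integer_type_node);
     tree p2 = build_pointer_type (integer_type_node);
     gcc_checking_assert (p1 == p2);  */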
7684
7685 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7686
7687 tree
7688 build_reference_type_for_mode (tree to_type, machine_mode mode,
7689 bool can_alias_all)
7690 {
7691 tree t;
7692
7693 if (to_type == error_mark_node)
7694 return error_mark_node;
7695
7696 /* If the pointed-to type has the may_alias attribute set, force
7697 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7698 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7699 can_alias_all = true;
7700
7701 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7702 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7703 In that case, return that type without regard to the rest of our
7704 operands.
7705
7706 ??? This is a kludge, but consistent with the way this function has
7707 always operated and there doesn't seem to be a good way to avoid this
7708 at the moment. */
7709 if (TYPE_REFERENCE_TO (to_type) != 0
7710 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7711 return TYPE_REFERENCE_TO (to_type);
7712
7713 /* First, if we already have a type for pointers to TO_TYPE and it's
7714 the proper mode, use it. */
7715 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7716 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7717 return t;
7718
7719 t = make_node (REFERENCE_TYPE);
7720
7721 TREE_TYPE (t) = to_type;
7722 SET_TYPE_MODE (t, mode);
7723 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7724 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7725 TYPE_REFERENCE_TO (to_type) = t;
7726
7727 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7728 SET_TYPE_STRUCTURAL_EQUALITY (t);
7729 else if (TYPE_CANONICAL (to_type) != to_type)
7730 TYPE_CANONICAL (t)
7731 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7732 mode, can_alias_all);
7733
7734 layout_type (t);
7735
7736 return t;
7737 }
7738
7739
7740 /* Build the node for the type of references-to-TO_TYPE by default
7741 in ptr_mode. */
7742
7743 tree
7744 build_reference_type (tree to_type)
7745 {
7746 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7747 : TYPE_ADDR_SPACE (to_type);
7748 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7749 return build_reference_type_for_mode (to_type, pointer_mode, false);
7750 }
7751
7752 #define MAX_INT_CACHED_PREC \
7753 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7754 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7755
7756 /* Builds a signed or unsigned integer type of precision PRECISION.
7757 Used for C bitfields whose precision does not match that of
7758 built-in target types. */
7759 tree
7760 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7761 int unsignedp)
7762 {
7763 tree itype, ret;
7764
7765 if (unsignedp)
7766 unsignedp = MAX_INT_CACHED_PREC + 1;
7767
7768 if (precision <= MAX_INT_CACHED_PREC)
7769 {
7770 itype = nonstandard_integer_type_cache[precision + unsignedp];
7771 if (itype)
7772 return itype;
7773 }
7774
7775 itype = make_node (INTEGER_TYPE);
7776 TYPE_PRECISION (itype) = precision;
7777
7778 if (unsignedp)
7779 fixup_unsigned_type (itype);
7780 else
7781 fixup_signed_type (itype);
7782
7783 ret = itype;
7784 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7785 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7786 if (precision <= MAX_INT_CACHED_PREC)
7787 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7788
7789 return ret;
7790 }
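
/* Editorial example (illustrative sketch, not part of the original
   source): a C front end handling the bit-field "unsigned int f : 24;"
   could obtain a matching 24-bit unsigned type with

     tree t = build_nonstandard_integer_type (24, 1);

   where a nonzero second argument requests an unsigned type.  Repeated
   calls with the same arguments return the cached node for precisions
   up to MAX_INT_CACHED_PREC.  */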
7791
7792 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7793 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7794 is true, reuse such a type that has already been constructed. */
7795
7796 static tree
7797 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7798 {
7799 tree itype = make_node (INTEGER_TYPE);
7800 inchash::hash hstate;
7801
7802 TREE_TYPE (itype) = type;
7803
7804 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7805 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7806
7807 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7808 SET_TYPE_MODE (itype, TYPE_MODE (type));
7809 TYPE_SIZE (itype) = TYPE_SIZE (type);
7810 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7811 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7812 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7813
7814 if (!shared)
7815 return itype;
7816
7817 if ((TYPE_MIN_VALUE (itype)
7818 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7819 || (TYPE_MAX_VALUE (itype)
7820 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7821 {
7822 /* Since we cannot reliably merge this type, we need to compare it using
7823 structural equality checks. */
7824 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7825 return itype;
7826 }
7827
7828 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7829 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7830 hstate.merge_hash (TYPE_HASH (type));
7831 itype = type_hash_canon (hstate.end (), itype);
7832
7833 return itype;
7834 }
7835
7836 /* Wrapper around build_range_type_1 with SHARED set to true. */
7837
7838 tree
7839 build_range_type (tree type, tree lowval, tree highval)
7840 {
7841 return build_range_type_1 (type, lowval, highval, true);
7842 }
7843
7844 /* Wrapper around build_range_type_1 with SHARED set to false. */
7845
7846 tree
7847 build_nonshared_range_type (tree type, tree lowval, tree highval)
7848 {
7849 return build_range_type_1 (type, lowval, highval, false);
7850 }
7851
7852 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7853 MAXVAL should be the maximum value in the domain
7854 (one less than the length of the array).
7855
7856 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7857 We don't enforce this limit, that is up to caller (e.g. language front end).
7858 The limit exists because the result is a signed type and we don't handle
7859 sizes that use more than one HOST_WIDE_INT. */
7860
7861 tree
7862 build_index_type (tree maxval)
7863 {
7864 return build_range_type (sizetype, size_zero_node, maxval);
7865 }
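
/* Editorial example (illustrative sketch, not part of the original
   source): the TYPE_DOMAIN for a ten-element array is the sizetype
   range 0..9, i.e.

     tree domain = build_index_type (size_int (9));

   which is shorthand for build_range_type (sizetype, size_zero_node,
   size_int (9)).  */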
7866
7867 /* Return true if the debug information for TYPE, a subtype, should be emitted
7868 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7869 high bound, respectively. Sometimes doing so would unnecessarily obfuscate the
7870 debug info and fail to reflect the source code, in which case return false. */
7871
7872 bool
7873 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7874 {
7875 tree base_type = TREE_TYPE (type), low, high;
7876
7877 /* Subrange types have a base type which is an integral type. */
7878 if (!INTEGRAL_TYPE_P (base_type))
7879 return false;
7880
7881 /* Get the real bounds of the subtype. */
7882 if (lang_hooks.types.get_subrange_bounds)
7883 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7884 else
7885 {
7886 low = TYPE_MIN_VALUE (type);
7887 high = TYPE_MAX_VALUE (type);
7888 }
7889
7890 /* If the type and its base type have the same representation and the same
7891 name, then the type is not a subrange but a copy of the base type. */
7892 if ((TREE_CODE (base_type) == INTEGER_TYPE
7893 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7894 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7895 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7896 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7897 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7898 return false;
7899
7900 if (lowval)
7901 *lowval = low;
7902 if (highval)
7903 *highval = high;
7904 return true;
7905 }
7906
7907 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7908 and number of elements specified by the range of values of INDEX_TYPE.
7909 If SHARED is true, reuse such a type that has already been constructed. */
7910
7911 static tree
7912 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7913 {
7914 tree t;
7915
7916 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7917 {
7918 error ("arrays of functions are not meaningful");
7919 elt_type = integer_type_node;
7920 }
7921
7922 t = make_node (ARRAY_TYPE);
7923 TREE_TYPE (t) = elt_type;
7924 TYPE_DOMAIN (t) = index_type;
7925 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7926 layout_type (t);
7927
7928 /* If the element type is incomplete at this point we get marked for
7929 structural equality. Do not record these types in the canonical
7930 type hashtable. */
7931 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7932 return t;
7933
7934 if (shared)
7935 {
7936 inchash::hash hstate;
7937 hstate.add_object (TYPE_HASH (elt_type));
7938 if (index_type)
7939 hstate.add_object (TYPE_HASH (index_type));
7940 t = type_hash_canon (hstate.end (), t);
7941 }
7942
7943 if (TYPE_CANONICAL (t) == t)
7944 {
7945 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7946 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7947 SET_TYPE_STRUCTURAL_EQUALITY (t);
7948 else if (TYPE_CANONICAL (elt_type) != elt_type
7949 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7950 TYPE_CANONICAL (t)
7951 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7952 index_type
7953 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7954 shared);
7955 }
7956
7957 return t;
7958 }
7959
7960 /* Wrapper around build_array_type_1 with SHARED set to true. */
7961
7962 tree
7963 build_array_type (tree elt_type, tree index_type)
7964 {
7965 return build_array_type_1 (elt_type, index_type, true);
7966 }
7967
7968 /* Wrapper around build_array_type_1 with SHARED set to false. */
7969
7970 tree
7971 build_nonshared_array_type (tree elt_type, tree index_type)
7972 {
7973 return build_array_type_1 (elt_type, index_type, false);
7974 }
7975
7976 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7977 sizetype. */
7978
7979 tree
7980 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7981 {
7982 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7983 }
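
/* Editorial example (illustrative sketch, not part of the original
   source): the type "char[16]" can be built either from an explicit
   domain or with the convenience wrapper above:

     tree a1 = build_array_type (char_type_node,
                                 build_index_type (size_int (15)));
     tree a2 = build_array_type_nelts (char_type_node, 16);

   Both calls yield the same shared ARRAY_TYPE node.  */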
7984
7985 /* Recursively examines the array elements of TYPE, until a non-array
7986 element type is found. */
7987
7988 tree
7989 strip_array_types (tree type)
7990 {
7991 while (TREE_CODE (type) == ARRAY_TYPE)
7992 type = TREE_TYPE (type);
7993
7994 return type;
7995 }
7996
7997 /* Computes the canonical argument types from the argument type list
7998 ARGTYPES.
7999
8000 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8001 on entry to this function, or if any of the ARGTYPES are
8002 structural.
8003
8004 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8005 true on entry to this function, or if any of the ARGTYPES are
8006 non-canonical.
8007
8008 Returns a canonical argument list, which may be ARGTYPES when the
8009 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8010 true) or would not differ from ARGTYPES. */
8011
8012 static tree
8013 maybe_canonicalize_argtypes (tree argtypes,
8014 bool *any_structural_p,
8015 bool *any_noncanonical_p)
8016 {
8017 tree arg;
8018 bool any_noncanonical_argtypes_p = false;
8019
8020 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8021 {
8022 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8023 /* Fail gracefully by stating that the type is structural. */
8024 *any_structural_p = true;
8025 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8026 *any_structural_p = true;
8027 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8028 || TREE_PURPOSE (arg))
8029 /* If the argument has a default argument, we consider it
8030 non-canonical even though the type itself is canonical.
8031 That way, different variants of function and method types
8032 with default arguments will all point to the variant with
8033 no defaults as their canonical type. */
8034 any_noncanonical_argtypes_p = true;
8035 }
8036
8037 if (*any_structural_p)
8038 return argtypes;
8039
8040 if (any_noncanonical_argtypes_p)
8041 {
8042 /* Build the canonical list of argument types. */
8043 tree canon_argtypes = NULL_TREE;
8044 bool is_void = false;
8045
8046 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8047 {
8048 if (arg == void_list_node)
8049 is_void = true;
8050 else
8051 canon_argtypes = tree_cons (NULL_TREE,
8052 TYPE_CANONICAL (TREE_VALUE (arg)),
8053 canon_argtypes);
8054 }
8055
8056 canon_argtypes = nreverse (canon_argtypes);
8057 if (is_void)
8058 canon_argtypes = chainon (canon_argtypes, void_list_node);
8059
8060 /* There is a non-canonical type. */
8061 *any_noncanonical_p = true;
8062 return canon_argtypes;
8063 }
8064
8065 /* The canonical argument types are the same as ARGTYPES. */
8066 return argtypes;
8067 }
8068
8069 /* Construct, lay out and return
8070 the type of functions returning type VALUE_TYPE
8071 given arguments of types ARG_TYPES.
8072 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8073 are data type nodes for the arguments of the function.
8074 If such a type has already been constructed, reuse it. */
8075
8076 tree
8077 build_function_type (tree value_type, tree arg_types)
8078 {
8079 tree t;
8080 inchash::hash hstate;
8081 bool any_structural_p, any_noncanonical_p;
8082 tree canon_argtypes;
8083
8084 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8085 {
8086 error ("function return type cannot be function");
8087 value_type = integer_type_node;
8088 }
8089
8090 /* Make a node of the sort we want. */
8091 t = make_node (FUNCTION_TYPE);
8092 TREE_TYPE (t) = value_type;
8093 TYPE_ARG_TYPES (t) = arg_types;
8094
8095 /* If we already have such a type, use the old one. */
8096 hstate.add_object (TYPE_HASH (value_type));
8097 type_hash_list (arg_types, hstate);
8098 t = type_hash_canon (hstate.end (), t);
8099
8100 /* Set up the canonical type. */
8101 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8102 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8103 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8104 &any_structural_p,
8105 &any_noncanonical_p);
8106 if (any_structural_p)
8107 SET_TYPE_STRUCTURAL_EQUALITY (t);
8108 else if (any_noncanonical_p)
8109 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8110 canon_argtypes);
8111
8112 if (!COMPLETE_TYPE_P (t))
8113 layout_type (t);
8114 return t;
8115 }
8116
8117 /* Build a function type. The RETURN_TYPE is the type returned by the
8118 function. If VAARGS is set, no void_type_node is appended to the
8119 list. ARGP must always be terminated by a NULL_TREE. */
8120
8121 static tree
8122 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8123 {
8124 tree t, args, last;
8125
8126 t = va_arg (argp, tree);
8127 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8128 args = tree_cons (NULL_TREE, t, args);
8129
8130 if (vaargs)
8131 {
8132 last = args;
8133 if (args != NULL_TREE)
8134 args = nreverse (args);
8135 gcc_assert (last != void_list_node);
8136 }
8137 else if (args == NULL_TREE)
8138 args = void_list_node;
8139 else
8140 {
8141 last = args;
8142 args = nreverse (args);
8143 TREE_CHAIN (last) = void_list_node;
8144 }
8145 args = build_function_type (return_type, args);
8146
8147 return args;
8148 }
8149
8150 /* Build a function type. The RETURN_TYPE is the type returned by the
8151 function. If additional arguments are provided, they are
8152 additional argument types. The list of argument types must always
8153 be terminated by NULL_TREE. */
8154
8155 tree
8156 build_function_type_list (tree return_type, ...)
8157 {
8158 tree args;
8159 va_list p;
8160
8161 va_start (p, return_type);
8162 args = build_function_type_list_1 (false, return_type, p);
8163 va_end (p);
8164 return args;
8165 }
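
/* Editorial example (illustrative sketch, not part of the original
   source): the type of a function "int f (double)" can be built with

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);

   The NULL_TREE terminator is required; the non-varargs variant
   appends void_list_node so the argument list is properly
   terminated.  */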
8166
8167 /* Build a variable argument function type. The RETURN_TYPE is the
8168 type returned by the function. If additional arguments are provided,
8169 they are additional argument types. The list of argument types must
8170 always be terminated by NULL_TREE. */
8171
8172 tree
8173 build_varargs_function_type_list (tree return_type, ...)
8174 {
8175 tree args;
8176 va_list p;
8177
8178 va_start (p, return_type);
8179 args = build_function_type_list_1 (true, return_type, p);
8180 va_end (p);
8181
8182 return args;
8183 }
8184
8185 /* Build a function type. RETURN_TYPE is the type returned by the
8186 function; VAARGS indicates whether the function takes varargs. The
8187 function takes N named arguments, the types of which are provided in
8188 ARG_TYPES. */
8189
8190 static tree
8191 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8192 tree *arg_types)
8193 {
8194 int i;
8195 tree t = vaargs ? NULL_TREE : void_list_node;
8196
8197 for (i = n - 1; i >= 0; i--)
8198 t = tree_cons (NULL_TREE, arg_types[i], t);
8199
8200 return build_function_type (return_type, t);
8201 }
8202
8203 /* Build a function type. RETURN_TYPE is the type returned by the
8204 function. The function takes N named arguments, the types of which
8205 are provided in ARG_TYPES. */
8206
8207 tree
8208 build_function_type_array (tree return_type, int n, tree *arg_types)
8209 {
8210 return build_function_type_array_1 (false, return_type, n, arg_types);
8211 }
8212
8213 /* Build a variable argument function type. RETURN_TYPE is the type
8214 returned by the function. The function takes N named arguments, the
8215 types of which are provided in ARG_TYPES. */
8216
8217 tree
8218 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8219 {
8220 return build_function_type_array_1 (true, return_type, n, arg_types);
8221 }
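
/* Editorial example (illustrative sketch, not part of the original
   source): the array-based builders are convenient when the argument
   types are already collected in a vector, e.g. for
   "int f (float, long, ...)":

     tree argtypes[2] = { float_type_node, long_integer_type_node };
     tree fntype = build_varargs_function_type_array (integer_type_node,
                                                      2, argtypes);  */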
8222
8223 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8224 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8225 for the method. An implicit additional parameter (of type
8226 pointer-to-BASETYPE) is added to the ARGTYPES. */
8227
8228 tree
8229 build_method_type_directly (tree basetype,
8230 tree rettype,
8231 tree argtypes)
8232 {
8233 tree t;
8234 tree ptype;
8235 inchash::hash hstate;
8236 bool any_structural_p, any_noncanonical_p;
8237 tree canon_argtypes;
8238
8239 /* Make a node of the sort we want. */
8240 t = make_node (METHOD_TYPE);
8241
8242 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8243 TREE_TYPE (t) = rettype;
8244 ptype = build_pointer_type (basetype);
8245
8246 /* The actual arglist for this function includes a "hidden" argument
8247 which is "this". Put it into the list of argument types. */
8248 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8249 TYPE_ARG_TYPES (t) = argtypes;
8250
8251 /* If we already have such a type, use the old one. */
8252 hstate.add_object (TYPE_HASH (basetype));
8253 hstate.add_object (TYPE_HASH (rettype));
8254 type_hash_list (argtypes, hstate);
8255 t = type_hash_canon (hstate.end (), t);
8256
8257 /* Set up the canonical type. */
8258 any_structural_p
8259 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8260 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8261 any_noncanonical_p
8262 = (TYPE_CANONICAL (basetype) != basetype
8263 || TYPE_CANONICAL (rettype) != rettype);
8264 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8265 &any_structural_p,
8266 &any_noncanonical_p);
8267 if (any_structural_p)
8268 SET_TYPE_STRUCTURAL_EQUALITY (t);
8269 else if (any_noncanonical_p)
8270 TYPE_CANONICAL (t)
8271 = build_method_type_directly (TYPE_CANONICAL (basetype),
8272 TYPE_CANONICAL (rettype),
8273 canon_argtypes);
8274 if (!COMPLETE_TYPE_P (t))
8275 layout_type (t);
8276
8277 return t;
8278 }
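
/* Editorial example (illustrative sketch, not part of the original
   source): given a RECORD_TYPE s_type (a hypothetical node) for a
   class S, the type of a member function "int S::f ()" would be built as

     tree mtype = build_method_type_directly (s_type, integer_type_node,
                                              void_list_node);

   The implicit "this" argument of type "S *" is prepended to the
   argument list by the function itself.  */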
8279
8280 /* Construct, lay out and return the type of methods belonging to class
8281 BASETYPE and whose arguments and values are described by TYPE.
8282 If that type exists already, reuse it.
8283 TYPE must be a FUNCTION_TYPE node. */
8284
8285 tree
8286 build_method_type (tree basetype, tree type)
8287 {
8288 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8289
8290 return build_method_type_directly (basetype,
8291 TREE_TYPE (type),
8292 TYPE_ARG_TYPES (type));
8293 }
8294
8295 /* Construct, lay out and return the type of offsets to a value
8296 of type TYPE, within an object of type BASETYPE.
8297 If a suitable offset type exists already, reuse it. */
8298
8299 tree
8300 build_offset_type (tree basetype, tree type)
8301 {
8302 tree t;
8303 inchash::hash hstate;
8304
8305 /* Make a node of the sort we want. */
8306 t = make_node (OFFSET_TYPE);
8307
8308 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8309 TREE_TYPE (t) = type;
8310
8311 /* If we already have such a type, use the old one. */
8312 hstate.add_object (TYPE_HASH (basetype));
8313 hstate.add_object (TYPE_HASH (type));
8314 t = type_hash_canon (hstate.end (), t);
8315
8316 if (!COMPLETE_TYPE_P (t))
8317 layout_type (t);
8318
8319 if (TYPE_CANONICAL (t) == t)
8320 {
8321 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8322 || TYPE_STRUCTURAL_EQUALITY_P (type))
8323 SET_TYPE_STRUCTURAL_EQUALITY (t);
8324 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8325 || TYPE_CANONICAL (type) != type)
8326 TYPE_CANONICAL (t)
8327 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8328 TYPE_CANONICAL (type));
8329 }
8330
8331 return t;
8332 }
8333
8334 /* Create a complex type whose components are COMPONENT_TYPE. */
8335
8336 tree
8337 build_complex_type (tree component_type)
8338 {
8339 tree t;
8340 inchash::hash hstate;
8341
8342 gcc_assert (INTEGRAL_TYPE_P (component_type)
8343 || SCALAR_FLOAT_TYPE_P (component_type)
8344 || FIXED_POINT_TYPE_P (component_type));
8345
8346 /* Make a node of the sort we want. */
8347 t = make_node (COMPLEX_TYPE);
8348
8349 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8350
8351 /* If we already have such a type, use the old one. */
8352 hstate.add_object (TYPE_HASH (component_type));
8353 t = type_hash_canon (hstate.end (), t);
8354
8355 if (!COMPLETE_TYPE_P (t))
8356 layout_type (t);
8357
8358 if (TYPE_CANONICAL (t) == t)
8359 {
8360 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8361 SET_TYPE_STRUCTURAL_EQUALITY (t);
8362 else if (TYPE_CANONICAL (component_type) != component_type)
8363 TYPE_CANONICAL (t)
8364 = build_complex_type (TYPE_CANONICAL (component_type));
8365 }
8366
8367 /* We need to create a name, since complex is a fundamental type. */
8368 if (! TYPE_NAME (t))
8369 {
8370 const char *name;
8371 if (component_type == char_type_node)
8372 name = "complex char";
8373 else if (component_type == signed_char_type_node)
8374 name = "complex signed char";
8375 else if (component_type == unsigned_char_type_node)
8376 name = "complex unsigned char";
8377 else if (component_type == short_integer_type_node)
8378 name = "complex short int";
8379 else if (component_type == short_unsigned_type_node)
8380 name = "complex short unsigned int";
8381 else if (component_type == integer_type_node)
8382 name = "complex int";
8383 else if (component_type == unsigned_type_node)
8384 name = "complex unsigned int";
8385 else if (component_type == long_integer_type_node)
8386 name = "complex long int";
8387 else if (component_type == long_unsigned_type_node)
8388 name = "complex long unsigned int";
8389 else if (component_type == long_long_integer_type_node)
8390 name = "complex long long int";
8391 else if (component_type == long_long_unsigned_type_node)
8392 name = "complex long long unsigned int";
8393 else
8394 name = 0;
8395
8396 if (name != 0)
8397 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8398 get_identifier (name), t);
8399 }
8400
8401 return build_qualified_type (t, TYPE_QUALS (component_type));
8402 }
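
/* Editorial example (illustrative sketch, not part of the original
   source): the type "_Complex double" is simply

     tree cd = build_complex_type (double_type_node);

   For integral component types the code above also attaches a name
   such as "complex int" for debug output.  */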
8403
8404 /* If TYPE is a real or complex floating-point type and the target
8405 does not directly support arithmetic on TYPE then return the wider
8406 type to be used for arithmetic on TYPE. Otherwise, return
8407 NULL_TREE. */
8408
8409 tree
8410 excess_precision_type (tree type)
8411 {
8412 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8413 {
8414 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8415 switch (TREE_CODE (type))
8416 {
8417 case REAL_TYPE:
8418 switch (flt_eval_method)
8419 {
8420 case 1:
8421 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8422 return double_type_node;
8423 break;
8424 case 2:
8425 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8426 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8427 return long_double_type_node;
8428 break;
8429 default:
8430 gcc_unreachable ();
8431 }
8432 break;
8433 case COMPLEX_TYPE:
8434 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8435 return NULL_TREE;
8436 switch (flt_eval_method)
8437 {
8438 case 1:
8439 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8440 return complex_double_type_node;
8441 break;
8442 case 2:
8443 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8444 || (TYPE_MODE (TREE_TYPE (type))
8445 == TYPE_MODE (double_type_node)))
8446 return complex_long_double_type_node;
8447 break;
8448 default:
8449 gcc_unreachable ();
8450 }
8451 break;
8452 default:
8453 break;
8454 }
8455 }
8456 return NULL_TREE;
8457 }
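
/* Editorial example (illustrative sketch, not part of the original
   source): on a target where FLT_EVAL_METHOD is 2 (e.g. x87 excess
   precision) and -fexcess-precision=standard is in effect,

     excess_precision_type (float_type_node) == long_double_type_node

   so float arithmetic is carried out in long double and only rounded
   back on assignment or cast.  */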
8458 \f
8459 /* Return OP, stripped of any conversions to wider types as much as is safe.
8460 Converting the value back to OP's type makes a value equivalent to OP.
8461
8462 If FOR_TYPE is nonzero, we return a value which, if converted to
8463 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8464
8465 OP must have integer, real or enumeral type. Pointers are not allowed!
8466
8467 There are some cases where the obvious value we could return
8468 would regenerate to OP if converted to OP's type,
8469 but would not extend like OP to wider types.
8470 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8471 For example, if OP is (unsigned short)(signed char)-1,
8472 we avoid returning (signed char)-1 if FOR_TYPE is int,
8473 even though extending that to an unsigned short would regenerate OP,
8474 since the result of extending (signed char)-1 to (int)
8475 is different from (int) OP. */
8476
8477 tree
8478 get_unwidened (tree op, tree for_type)
8479 {
8480 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8481 tree type = TREE_TYPE (op);
8482 unsigned final_prec
8483 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8484 int uns
8485 = (for_type != 0 && for_type != type
8486 && final_prec > TYPE_PRECISION (type)
8487 && TYPE_UNSIGNED (type));
8488 tree win = op;
8489
8490 while (CONVERT_EXPR_P (op))
8491 {
8492 int bitschange;
8493
8494 /* TYPE_PRECISION on vector types has different meaning
8495 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8496 so avoid them here. */
8497 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8498 break;
8499
8500 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8501 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8502
8503 /* Truncations are many-one and so cannot be removed,
8504 unless we are later going to truncate down even farther. */
8505 if (bitschange < 0
8506 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8507 break;
8508
8509 /* See what's inside this conversion. If we decide to strip it,
8510 we will set WIN. */
8511 op = TREE_OPERAND (op, 0);
8512
8513 /* If we have not stripped any zero-extensions (uns is 0),
8514 we can strip any kind of extension.
8515 If we have previously stripped a zero-extension,
8516 only zero-extensions can safely be stripped.
8517 Any extension can be stripped if the bits it would produce
8518 are all going to be discarded later by truncating to FOR_TYPE. */
8519
8520 if (bitschange > 0)
8521 {
8522 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8523 win = op;
8524 /* TYPE_UNSIGNED says whether this is a zero-extension.
8525 Let's avoid computing it if it does not affect WIN
8526 and if UNS will not be needed again. */
8527 if ((uns
8528 || CONVERT_EXPR_P (op))
8529 && TYPE_UNSIGNED (TREE_TYPE (op)))
8530 {
8531 uns = 1;
8532 win = op;
8533 }
8534 }
8535 }
8536
8537 /* If we finally reach a constant see if it fits in for_type and
8538 in that case convert it. */
8539 if (for_type
8540 && TREE_CODE (win) == INTEGER_CST
8541 && TREE_TYPE (win) != for_type
8542 && int_fits_type_p (win, for_type))
8543 win = fold_convert (for_type, win);
8544
8545 return win;
8546 }
8547 \f
8548 /* Return OP or a simpler expression for a narrower value
8549 which can be sign-extended or zero-extended to give back OP.
8550 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8551 or 0 if the value should be sign-extended. */
8552
8553 tree
8554 get_narrower (tree op, int *unsignedp_ptr)
8555 {
8556 int uns = 0;
8557 int first = 1;
8558 tree win = op;
8559 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8560
8561 while (TREE_CODE (op) == NOP_EXPR)
8562 {
8563 int bitschange
8564 = (TYPE_PRECISION (TREE_TYPE (op))
8565 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8566
8567 /* Truncations are many-one so cannot be removed. */
8568 if (bitschange < 0)
8569 break;
8570
8571 /* See what's inside this conversion. If we decide to strip it,
8572 we will set WIN. */
8573
8574 if (bitschange > 0)
8575 {
8576 op = TREE_OPERAND (op, 0);
8577 /* An extension: the outermost one can be stripped,
8578 but remember whether it is zero or sign extension. */
8579 if (first)
8580 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8581 /* Otherwise, if a sign extension has been stripped,
8582 only sign extensions can now be stripped;
8583 if a zero extension has been stripped, only zero-extensions. */
8584 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8585 break;
8586 first = 0;
8587 }
8588 else /* bitschange == 0 */
8589 {
8590 /* A change in nominal type can always be stripped, but we must
8591 preserve the unsignedness. */
8592 if (first)
8593 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8594 first = 0;
8595 op = TREE_OPERAND (op, 0);
8596 /* Keep trying to narrow, but don't assign op to win if it
8597 would turn an integral type into something else. */
8598 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8599 continue;
8600 }
8601
8602 win = op;
8603 }
8604
8605 if (TREE_CODE (op) == COMPONENT_REF
8606 /* Since type_for_size always gives an integer type. */
8607 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8608 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8609 /* Ensure field is laid out already. */
8610 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8611 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8612 {
8613 unsigned HOST_WIDE_INT innerprec
8614 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8615 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8616 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8617 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8618
8619 /* We can get this structure field in a narrower type that fits it,
8620 but the resulting extension to its nominal type (a fullword type)
8621 must satisfy the same conditions as for other extensions.
8622
8623 Do this only for fields that are aligned (not bit-fields),
8624 because when bit-field insns are used there is no
8625 advantage in doing this. */
8626
8627 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8628 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8629 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8630 && type != 0)
8631 {
8632 if (first)
8633 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8634 win = fold_convert (type, op);
8635 }
8636 }
8637
8638 *unsignedp_ptr = uns;
8639 return win;
8640 }
8641 \f
8642 /* Returns true if integer constant C has a value that is permissible
8643 for type TYPE (an INTEGER_TYPE). */
8644
8645 bool
8646 int_fits_type_p (const_tree c, const_tree type)
8647 {
8648 tree type_low_bound, type_high_bound;
8649 bool ok_for_low_bound, ok_for_high_bound;
8650 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8651
8652 retry:
8653 type_low_bound = TYPE_MIN_VALUE (type);
8654 type_high_bound = TYPE_MAX_VALUE (type);
8655
8656 /* If at least one bound of the type is a constant integer, we can check
8657 ourselves and maybe make a decision. If no such decision is possible, but
8658 this type is a subtype, try checking against that. Otherwise, use
8659 fits_to_tree_p, which checks against the precision.
8660
8661 Compute the status for each possibly constant bound, and return if we see
8662 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8663 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8664 for "constant known to fit". */
8665
8666 /* Check if c >= type_low_bound. */
8667 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8668 {
8669 if (tree_int_cst_lt (c, type_low_bound))
8670 return false;
8671 ok_for_low_bound = true;
8672 }
8673 else
8674 ok_for_low_bound = false;
8675
8676 /* Check if c <= type_high_bound. */
8677 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8678 {
8679 if (tree_int_cst_lt (type_high_bound, c))
8680 return false;
8681 ok_for_high_bound = true;
8682 }
8683 else
8684 ok_for_high_bound = false;
8685
8686 /* If the constant fits both bounds, the result is known. */
8687 if (ok_for_low_bound && ok_for_high_bound)
8688 return true;
8689
8690 /* Perform some generic filtering which may allow making a decision
8691 even if the bounds are not constant. First, negative integers
8692 never fit in unsigned types. */
8693 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8694 return false;
8695
8696 /* Second, narrower types always fit in wider ones. */
8697 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8698 return true;
8699
8700 /* Third, unsigned integers with top bit set never fit signed types. */
8701 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8702 {
8703 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8704 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8705 {
8706 /* When a tree_cst is converted to a wide-int, the precision
8707 is taken from the type. However, if the precision of the
8708 mode underneath the type is smaller than that, it is
8709 possible that the value will not fit. The test below
8710 fails if any bit is set between the sign bit of the
8711 underlying mode and the top bit of the type. */
8712 if (wi::ne_p (wi::zext (c, prec - 1), c))
8713 return false;
8714 }
8715 else if (wi::neg_p (c))
8716 return false;
8717 }
8718
8719 /* If we haven't been able to decide at this point, there is nothing more we
8720 can check ourselves here. Look at the base type if we have one and it
8721 has the same precision. */
8722 if (TREE_CODE (type) == INTEGER_TYPE
8723 && TREE_TYPE (type) != 0
8724 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8725 {
8726 type = TREE_TYPE (type);
8727 goto retry;
8728 }
8729
8730 /* Or to fits_to_tree_p, if nothing else. */
8731 return wi::fits_to_tree_p (c, type);
8732 }
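
/* Editorial example (illustrative sketch, not part of the original
   source): checking whether the constant 300 fits in "unsigned char":

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   Here fits is false, while the same test with the constant 200 would
   yield true.  */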
8733
8734 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8735 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8736 represented (assuming two's-complement arithmetic) within the bit
8737 precision of the type are returned instead. */
8738
8739 void
8740 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8741 {
8742 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8743 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8744 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8745 else
8746 {
8747 if (TYPE_UNSIGNED (type))
8748 mpz_set_ui (min, 0);
8749 else
8750 {
8751 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8752 wi::to_mpz (mn, min, SIGNED);
8753 }
8754 }
8755
8756 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8757 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8758 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8759 else
8760 {
8761 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8762 wi::to_mpz (mn, max, TYPE_SIGN (type));
8763 }
8764 }
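
/* Editorial example (illustrative sketch, not part of the original
   source): retrieving the representable range of "signed char" into
   GMP integers:

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (signed_char_type_node, lo, hi);
     ... lo is now -128 and hi is 127 on typical targets ...
     mpz_clear (lo);
     mpz_clear (hi);  */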
8765
8766 /* Return true if VAR is an automatic variable defined in function FN. */
8767
8768 bool
8769 auto_var_in_fn_p (const_tree var, const_tree fn)
8770 {
8771 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8772 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8773 || TREE_CODE (var) == PARM_DECL)
8774 && ! TREE_STATIC (var))
8775 || TREE_CODE (var) == LABEL_DECL
8776 || TREE_CODE (var) == RESULT_DECL));
8777 }
8778
8779 /* Subprogram of following function. Called by walk_tree.
8780
8781 Return *TP if it is an automatic variable or parameter of the
8782 function passed in as DATA. */
8783
8784 static tree
8785 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8786 {
8787 tree fn = (tree) data;
8788
8789 if (TYPE_P (*tp))
8790 *walk_subtrees = 0;
8791
8792 else if (DECL_P (*tp)
8793 && auto_var_in_fn_p (*tp, fn))
8794 return *tp;
8795
8796 return NULL_TREE;
8797 }
8798
8799 /* Returns true if T is, contains, or refers to a type with variable
8800 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8801 arguments, but not the return type. If FN is nonzero, only return
8802 true if a modifier of the type or position of FN is a variable or
8803 parameter inside FN.
8804
8805 This concept is more general than that of C99 'variably modified types':
8806 in C99, a struct type is never variably modified because a VLA may not
8807 appear as a structure member. However, in GNU C code like:
8808
8809 struct S { int i[f()]; };
8810
8811 is valid, and other languages may define similar constructs. */
8812
8813 bool
8814 variably_modified_type_p (tree type, tree fn)
8815 {
8816 tree t;
8817
8818 /* Test if T is either variable (if FN is zero) or an expression containing
8819 a variable in FN. If TYPE isn't gimplified, return true also if
8820 gimplify_one_sizepos would gimplify the expression into a local
8821 variable. */
8822 #define RETURN_TRUE_IF_VAR(T) \
8823 do { tree _t = (T); \
8824 if (_t != NULL_TREE \
8825 && _t != error_mark_node \
8826 && TREE_CODE (_t) != INTEGER_CST \
8827 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8828 && (!fn \
8829 || (!TYPE_SIZES_GIMPLIFIED (type) \
8830 && !is_gimple_sizepos (_t)) \
8831 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8832 return true; } while (0)
8833
8834 if (type == error_mark_node)
8835 return false;
8836
8837 /* If TYPE itself has variable size, it is variably modified. */
8838 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8839 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8840
8841 switch (TREE_CODE (type))
8842 {
8843 case POINTER_TYPE:
8844 case REFERENCE_TYPE:
8845 case VECTOR_TYPE:
8846 if (variably_modified_type_p (TREE_TYPE (type), fn))
8847 return true;
8848 break;
8849
8850 case FUNCTION_TYPE:
8851 case METHOD_TYPE:
8852 /* If TYPE is a function type, it is variably modified if the
8853 return type is variably modified. */
8854 if (variably_modified_type_p (TREE_TYPE (type), fn))
8855 return true;
8856 break;
8857
8858 case INTEGER_TYPE:
8859 case REAL_TYPE:
8860 case FIXED_POINT_TYPE:
8861 case ENUMERAL_TYPE:
8862 case BOOLEAN_TYPE:
8863 /* Scalar types are variably modified if their end points
8864 aren't constant. */
8865 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8866 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8867 break;
8868
8869 case RECORD_TYPE:
8870 case UNION_TYPE:
8871 case QUAL_UNION_TYPE:
8872 /* We can't see if any of the fields are variably-modified by the
8873 definition we normally use, since that would produce infinite
8874 recursion via pointers. */
8875 /* This is variably modified if some field's type is. */
8876 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8877 if (TREE_CODE (t) == FIELD_DECL)
8878 {
8879 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8880 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8881 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8882
8883 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8884 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8885 }
8886 break;
8887
8888 case ARRAY_TYPE:
8889 /* Do not call ourselves to avoid infinite recursion. This is
8890 variably modified if the element type is. */
8891 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8892 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8893 break;
8894
8895 default:
8896 break;
8897 }
8898
8899 /* The current language may have other cases to check, but in general,
8900 all other types are not variably modified. */
8901 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8902
8903 #undef RETURN_TRUE_IF_VAR
8904 }
8905
8906 /* Given a DECL or TYPE, return the scope in which it was declared, or
8907 NULL_TREE if there is no containing scope. */
8908
8909 tree
8910 get_containing_scope (const_tree t)
8911 {
8912 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8913 }
8914
8915 /* Return the innermost context enclosing DECL that is
8916 a FUNCTION_DECL, or zero if none. */
8917
8918 tree
8919 decl_function_context (const_tree decl)
8920 {
8921 tree context;
8922
8923 if (TREE_CODE (decl) == ERROR_MARK)
8924 return 0;
8925
8926 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8927 where we look up the function at runtime. Such functions always take
8928 a first argument of type 'pointer to real context'.
8929
8930 C++ should really be fixed to use DECL_CONTEXT for the real context,
8931 and use something else for the "virtual context". */
8932 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8933 context
8934 = TYPE_MAIN_VARIANT
8935 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8936 else
8937 context = DECL_CONTEXT (decl);
8938
8939 while (context && TREE_CODE (context) != FUNCTION_DECL)
8940 {
8941 if (TREE_CODE (context) == BLOCK)
8942 context = BLOCK_SUPERCONTEXT (context);
8943 else
8944 context = get_containing_scope (context);
8945 }
8946
8947 return context;
8948 }
8949
8950 /* Return the innermost context enclosing DECL that is
8951 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8952 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8953
8954 tree
8955 decl_type_context (const_tree decl)
8956 {
8957 tree context = DECL_CONTEXT (decl);
8958
8959 while (context)
8960 switch (TREE_CODE (context))
8961 {
8962 case NAMESPACE_DECL:
8963 case TRANSLATION_UNIT_DECL:
8964 return NULL_TREE;
8965
8966 case RECORD_TYPE:
8967 case UNION_TYPE:
8968 case QUAL_UNION_TYPE:
8969 return context;
8970
8971 case TYPE_DECL:
8972 case FUNCTION_DECL:
8973 context = DECL_CONTEXT (context);
8974 break;
8975
8976 case BLOCK:
8977 context = BLOCK_SUPERCONTEXT (context);
8978 break;
8979
8980 default:
8981 gcc_unreachable ();
8982 }
8983
8984 return NULL_TREE;
8985 }
8986
8987 /* CALL is a CALL_EXPR. Return the declaration for the function
8988 called, or NULL_TREE if the called function cannot be
8989 determined. */
8990
8991 tree
8992 get_callee_fndecl (const_tree call)
8993 {
8994 tree addr;
8995
8996 if (call == error_mark_node)
8997 return error_mark_node;
8998
8999 /* It's invalid to call this function with anything but a
9000 CALL_EXPR. */
9001 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9002
9003 /* The first operand to the CALL is the address of the function
9004 called. */
9005 addr = CALL_EXPR_FN (call);
9006
9007 /* If there is no function, return early. */
9008 if (addr == NULL_TREE)
9009 return NULL_TREE;
9010
9011 STRIP_NOPS (addr);
9012
9013 /* If this is a readonly function pointer, extract its initial value. */
9014 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9015 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9016 && DECL_INITIAL (addr))
9017 addr = DECL_INITIAL (addr);
9018
9019 /* If the address is just `&f' for some function `f', then we know
9020 that `f' is being called. */
9021 if (TREE_CODE (addr) == ADDR_EXPR
9022 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9023 return TREE_OPERAND (addr, 0);
9024
9025 /* We couldn't figure out what was being called. */
9026 return NULL_TREE;
9027 }
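
/* Editorial example (illustrative sketch, not part of the original
   source): for a CALL_EXPR built for the C call "foo (x)", the callee
   address is the ADDR_EXPR "&foo", so

     tree fndecl = get_callee_fndecl (call);

   returns the FUNCTION_DECL of foo; for an indirect call through a
   function pointer whose value cannot be determined it returns
   NULL_TREE.  */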
9028
9029 /* Print debugging information about tree nodes generated during the compile,
9030 and any language-specific information. */
9031
9032 void
9033 dump_tree_statistics (void)
9034 {
9035 if (GATHER_STATISTICS)
9036 {
9037 int i;
9038 int total_nodes, total_bytes;
9039 fprintf (stderr, "Kind Nodes Bytes\n");
9040 fprintf (stderr, "---------------------------------------\n");
9041 total_nodes = total_bytes = 0;
9042 for (i = 0; i < (int) all_kinds; i++)
9043 {
9044 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9045 tree_node_counts[i], tree_node_sizes[i]);
9046 total_nodes += tree_node_counts[i];
9047 total_bytes += tree_node_sizes[i];
9048 }
9049 fprintf (stderr, "---------------------------------------\n");
9050 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9051 fprintf (stderr, "---------------------------------------\n");
9052 fprintf (stderr, "Code Nodes\n");
9053 fprintf (stderr, "----------------------------\n");
9054 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9055 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9056 tree_code_counts[i]);
9057 fprintf (stderr, "----------------------------\n");
9058 ssanames_print_statistics ();
9059 phinodes_print_statistics ();
9060 }
9061 else
9062 fprintf (stderr, "(No per-node statistics)\n");
9063
9064 print_type_hash_statistics ();
9065 print_debug_expr_statistics ();
9066 print_value_expr_statistics ();
9067 lang_hooks.print_statistics ();
9068 }
9069 \f
9070 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9071
9072 /* Generate a crc32 of the low BITS bits of VALUE. */
9073
9074 static unsigned
9075 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9076 {
9077 unsigned ix;
9078
9079 for (ix = bits; ix--; value <<= 1)
9080 {
9081 unsigned feedback;
9082
9083 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9084 chksum <<= 1;
9085 chksum ^= feedback;
9086 }
9087 return chksum;
9088 }
9089
9090 /* Generate a crc32 of a 32-bit unsigned. */
9091
9092 unsigned
9093 crc32_unsigned (unsigned chksum, unsigned value)
9094 {
9095 return crc32_unsigned_bits (chksum, value, 32);
9096 }
9097
9098 /* Generate a crc32 of a byte. */
9099
9100 unsigned
9101 crc32_byte (unsigned chksum, char byte)
9102 {
9103 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9104 }
9105
9106 /* Generate a crc32 of a string. */
9107
9108 unsigned
9109 crc32_string (unsigned chksum, const char *string)
9110 {
9111 do
9112 {
9113 chksum = crc32_byte (chksum, *string);
9114 }
9115 while (*string++);
9116 return chksum;
9117 }
9118
9119 /* P is a string that will be used in a symbol. Mask out any characters
9120 that are not valid in that context. */
9121
9122 void
9123 clean_symbol_name (char *p)
9124 {
9125 for (; *p; p++)
9126 if (! (ISALNUM (*p)
9127 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9128 || *p == '$'
9129 #endif
9130 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9131 || *p == '.'
9132 #endif
9133 ))
9134 *p = '_';
9135 }
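
/* Editorial example (illustrative sketch, not part of the original
   source): on a target where neither '$' nor '.' may appear in labels,

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   rewrites buf in place to "foo_bar_c".  */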
9136
9137 /* Generate a name for a special-purpose function.
9138 The generated name may need to be unique across the whole link.
9139 Changes to this function may also require corresponding changes to
9140 xstrdup_mask_random.
9141 TYPE is some string to identify the purpose of this function to the
9142 linker or collect2; it must start with an uppercase letter,
9143 one of:
9144 I - for constructors
9145 D - for destructors
9146 N - for C++ anonymous namespaces
9147 F - for DWARF unwind frame information. */
9148
9149 tree
9150 get_file_function_name (const char *type)
9151 {
9152 char *buf;
9153 const char *p;
9154 char *q;
9155
9156 /* If we already have a name we know to be unique, just use that. */
9157 if (first_global_object_name)
9158 p = q = ASTRDUP (first_global_object_name);
9159 /* If the target is handling the constructors/destructors, they
9160 will be local to this file and the name is only necessary for
9161 debugging purposes.
9162 We also assign sub_I and sub_D suffixes to constructors called from
9163 the global static constructors. These are always local. */
9164 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9165 || (strncmp (type, "sub_", 4) == 0
9166 && (type[4] == 'I' || type[4] == 'D')))
9167 {
9168 const char *file = main_input_filename;
9169 if (! file)
9170 file = LOCATION_FILE (input_location);
9171 /* Just use the file's basename, because the full pathname
9172 might be quite long. */
9173 p = q = ASTRDUP (lbasename (file));
9174 }
9175 else
9176 {
9177 /* Otherwise, the name must be unique across the entire link.
9178 We don't have anything that we know to be unique to this translation
9179 unit, so use what we do have and throw in some randomness. */
9180 unsigned len;
9181 const char *name = weak_global_object_name;
9182 const char *file = main_input_filename;
9183
9184 if (! name)
9185 name = "";
9186 if (! file)
9187 file = LOCATION_FILE (input_location);
9188
9189 len = strlen (file);
9190 q = (char *) alloca (9 + 17 + len + 1);
9191 memcpy (q, file, len + 1);
9192
9193 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9194 crc32_string (0, name), get_random_seed (false));
9195
9196 p = q;
9197 }
9198
9199 clean_symbol_name (q);
9200 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9201 + strlen (type));
9202
9203 /* Set up the name of the file-level functions we may need.
9204 Use a global object (which is already required to be unique over
9205 the program) rather than the file name (which imposes extra
9206 constraints). */
9207 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9208
9209 return get_identifier (buf);
9210 }
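
/* Editorial example (illustrative sketch, not part of the original
   source): if the first global object seen in the translation unit is
   "main", then

     tree id = get_file_function_name ("I");

   yields the identifier "_GLOBAL__I_main", following
   FILE_FUNCTION_FORMAT above; the "I" prefix marks a static
   constructor for collect2.  */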
9211 \f
9212 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9213
9214 /* Complain that the tree code of NODE does not match the expected 0
9215 terminated list of trailing codes. The trailing code list can be
9216 empty, for a more vague error message. FILE, LINE, and FUNCTION
9217 are of the caller. */
9218
9219 void
9220 tree_check_failed (const_tree node, const char *file,
9221 int line, const char *function, ...)
9222 {
9223 va_list args;
9224 const char *buffer;
9225 unsigned length = 0;
9226 enum tree_code code;
9227
9228 va_start (args, function);
9229 while ((code = (enum tree_code) va_arg (args, int)))
9230 length += 4 + strlen (get_tree_code_name (code));
9231 va_end (args);
9232 if (length)
9233 {
9234 char *tmp;
9235 va_start (args, function);
9236 length += strlen ("expected ");
9237 buffer = tmp = (char *) alloca (length);
9238 length = 0;
9239 while ((code = (enum tree_code) va_arg (args, int)))
9240 {
9241 const char *prefix = length ? " or " : "expected ";
9242
9243 strcpy (tmp + length, prefix);
9244 length += strlen (prefix);
9245 strcpy (tmp + length, get_tree_code_name (code));
9246 length += strlen (get_tree_code_name (code));
9247 }
9248 va_end (args);
9249 }
9250 else
9251 buffer = "unexpected node";
9252
9253 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9254 buffer, get_tree_code_name (TREE_CODE (node)),
9255 function, trim_filename (file), line);
9256 }
9257
9258 /* Complain that the tree code of NODE matches one of the codes in the 0
9259 terminated list of disallowed trailing codes. FILE, LINE, and FUNCTION are of
9260 the caller. */
9261
9262 void
9263 tree_not_check_failed (const_tree node, const char *file,
9264 int line, const char *function, ...)
9265 {
9266 va_list args;
9267 char *buffer;
9268 unsigned length = 0;
9269 enum tree_code code;
9270
9271 va_start (args, function);
9272 while ((code = (enum tree_code) va_arg (args, int)))
9273 length += 4 + strlen (get_tree_code_name (code));
9274 va_end (args);
9275 va_start (args, function);
9276 buffer = (char *) alloca (length);
9277 length = 0;
9278 while ((code = (enum tree_code) va_arg (args, int)))
9279 {
9280 if (length)
9281 {
9282 strcpy (buffer + length, " or ");
9283 length += 4;
9284 }
9285 strcpy (buffer + length, get_tree_code_name (code));
9286 length += strlen (get_tree_code_name (code));
9287 }
9288 va_end (args);
9289
9290 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9291 buffer, get_tree_code_name (TREE_CODE (node)),
9292 function, trim_filename (file), line);
9293 }
9294
9295 /* Similar to tree_check_failed, except that we check for a class of tree
9296 code, given in CL. */
9297
9298 void
9299 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9300 const char *file, int line, const char *function)
9301 {
9302 internal_error
9303 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9304 TREE_CODE_CLASS_STRING (cl),
9305 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9306 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9307 }
9308
9309 /* Similar to tree_check_failed, except that instead of specifying a
9310 dozen codes, use the knowledge that they're all sequential. */
9311
9312 void
9313 tree_range_check_failed (const_tree node, const char *file, int line,
9314 const char *function, enum tree_code c1,
9315 enum tree_code c2)
9316 {
9317 char *buffer;
9318 unsigned length = 0;
9319 unsigned int c;
9320
9321 for (c = c1; c <= c2; ++c)
9322 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9323
9324 length += strlen ("expected ");
9325 buffer = (char *) alloca (length);
9326 length = 0;
9327
9328 for (c = c1; c <= c2; ++c)
9329 {
9330 const char *prefix = length ? " or " : "expected ";
9331
9332 strcpy (buffer + length, prefix);
9333 length += strlen (prefix);
9334 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9335 length += strlen (get_tree_code_name ((enum tree_code) c));
9336 }
9337
9338 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9339 buffer, get_tree_code_name (TREE_CODE (node)),
9340 function, trim_filename (file), line);
9341 }
9342
9343
9344 /* Similar to tree_check_failed, except that we check that a tree does
9345 not belong to the specified class of codes, given in CL. */
9346
9347 void
9348 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9349 const char *file, int line, const char *function)
9350 {
9351 internal_error
9352 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9353 TREE_CODE_CLASS_STRING (cl),
9354 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9355 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9356 }
9357
9358
9359 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9360
9361 void
9362 omp_clause_check_failed (const_tree node, const char *file, int line,
9363 const char *function, enum omp_clause_code code)
9364 {
9365 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9366 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9367 function, trim_filename (file), line);
9368 }
9369
9370
9371 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9372
9373 void
9374 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9375 const char *function, enum omp_clause_code c1,
9376 enum omp_clause_code c2)
9377 {
9378 char *buffer;
9379 unsigned length = 0;
9380 unsigned int c;
9381
9382 for (c = c1; c <= c2; ++c)
9383 length += 4 + strlen (omp_clause_code_name[c]);
9384
9385 length += strlen ("expected ");
9386 buffer = (char *) alloca (length);
9387 length = 0;
9388
9389 for (c = c1; c <= c2; ++c)
9390 {
9391 const char *prefix = length ? " or " : "expected ";
9392
9393 strcpy (buffer + length, prefix);
9394 length += strlen (prefix);
9395 strcpy (buffer + length, omp_clause_code_name[c]);
9396 length += strlen (omp_clause_code_name[c]);
9397 }
9398
9399 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9400 buffer, omp_clause_code_name[TREE_CODE (node)],
9401 function, trim_filename (file), line);
9402 }
9403
9404
9405 #undef DEFTREESTRUCT
9406 #define DEFTREESTRUCT(VAL, NAME) NAME,
9407
9408 static const char *ts_enum_names[] = {
9409 #include "treestruct.def"
9410 };
9411 #undef DEFTREESTRUCT
9412
9413 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9414
9415 /* Similar to tree_class_check_failed, except that we check for
9416 whether CODE contains the tree structure identified by EN. */
9417
9418 void
9419 tree_contains_struct_check_failed (const_tree node,
9420 const enum tree_node_structure_enum en,
9421 const char *file, int line,
9422 const char *function)
9423 {
9424 internal_error
9425 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9426 TS_ENUM_NAME (en),
9427 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9428 }
9429
9430
9431 /* Similar to above, except that the check is for the bounds of a tree_int_cst's
9432 (dynamically sized) vector. */
9433
9434 void
9435 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9436 const char *function)
9437 {
9438 internal_error
9439 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9440 idx + 1, len, function, trim_filename (file), line);
9441 }
9442
9443 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9444 (dynamically sized) vector. */
9445
9446 void
9447 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9448 const char *function)
9449 {
9450 internal_error
9451 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9452 idx + 1, len, function, trim_filename (file), line);
9453 }
9454
9455 /* Similar to above, except that the check is for the bounds of the operand
9456 vector of an expression node EXP. */
9457
9458 void
9459 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9460 int line, const char *function)
9461 {
9462 enum tree_code code = TREE_CODE (exp);
9463 internal_error
9464 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9465 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9466 function, trim_filename (file), line);
9467 }
9468
9469 /* Similar to above, except that the check is for the number of
9470 operands of an OMP_CLAUSE node. */
9471
9472 void
9473 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9474 int line, const char *function)
9475 {
9476 internal_error
9477 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9478 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9479 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9480 trim_filename (file), line);
9481 }
9482 #endif /* ENABLE_TREE_CHECKING */
9483 \f
9484 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9485 and mapped to the machine mode MODE. Initialize its fields and build
9486 the information necessary for debugging output. */
9487
9488 static tree
9489 make_vector_type (tree innertype, int nunits, machine_mode mode)
9490 {
9491 tree t;
9492 inchash::hash hstate;
9493
9494 t = make_node (VECTOR_TYPE);
9495 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9496 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9497 SET_TYPE_MODE (t, mode);
9498
9499 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9500 SET_TYPE_STRUCTURAL_EQUALITY (t);
9501 else if (TYPE_CANONICAL (innertype) != innertype
9502 || mode != VOIDmode)
9503 TYPE_CANONICAL (t)
9504 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9505
9506 layout_type (t);
9507
9508 hstate.add_wide_int (VECTOR_TYPE);
9509 hstate.add_wide_int (nunits);
9510 hstate.add_wide_int (mode);
9511 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9512 t = type_hash_canon (hstate.end (), t);
9513
9514 /* We have built a main variant, based on the main variant of the
9515 inner type. Use it to build the variant we return. */
9516 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9517 && TREE_TYPE (t) != innertype)
9518 return build_type_attribute_qual_variant (t,
9519 TYPE_ATTRIBUTES (innertype),
9520 TYPE_QUALS (innertype));
9521
9522 return t;
9523 }
9524
9525 static tree
9526 make_or_reuse_type (unsigned size, int unsignedp)
9527 {
9528 int i;
9529
9530 if (size == INT_TYPE_SIZE)
9531 return unsignedp ? unsigned_type_node : integer_type_node;
9532 if (size == CHAR_TYPE_SIZE)
9533 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9534 if (size == SHORT_TYPE_SIZE)
9535 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9536 if (size == LONG_TYPE_SIZE)
9537 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9538 if (size == LONG_LONG_TYPE_SIZE)
9539 return (unsignedp ? long_long_unsigned_type_node
9540 : long_long_integer_type_node);
9541
9542 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9543 if (size == int_n_data[i].bitsize
9544 && int_n_enabled_p[i])
9545 return (unsignedp ? int_n_trees[i].unsigned_type
9546 : int_n_trees[i].signed_type);
9547
9548 if (unsignedp)
9549 return make_unsigned_type (size);
9550 else
9551 return make_signed_type (size);
9552 }
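
/* Illustrative example, not part of the original source: on a target
   where INT_TYPE_SIZE is 32, a call such as

     tree u32 = make_or_reuse_type (32, 1);

   simply returns the existing unsigned_type_node, while an unusual
   width such as make_or_reuse_type (24, 1) falls through to
   make_unsigned_type (24) and builds a fresh node (assuming no __intN
   type of that width is enabled).  */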
9553
9554 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9555
9556 static tree
9557 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9558 {
9559 if (satp)
9560 {
9561 if (size == SHORT_FRACT_TYPE_SIZE)
9562 return unsignedp ? sat_unsigned_short_fract_type_node
9563 : sat_short_fract_type_node;
9564 if (size == FRACT_TYPE_SIZE)
9565 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9566 if (size == LONG_FRACT_TYPE_SIZE)
9567 return unsignedp ? sat_unsigned_long_fract_type_node
9568 : sat_long_fract_type_node;
9569 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9570 return unsignedp ? sat_unsigned_long_long_fract_type_node
9571 : sat_long_long_fract_type_node;
9572 }
9573 else
9574 {
9575 if (size == SHORT_FRACT_TYPE_SIZE)
9576 return unsignedp ? unsigned_short_fract_type_node
9577 : short_fract_type_node;
9578 if (size == FRACT_TYPE_SIZE)
9579 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9580 if (size == LONG_FRACT_TYPE_SIZE)
9581 return unsignedp ? unsigned_long_fract_type_node
9582 : long_fract_type_node;
9583 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9584 return unsignedp ? unsigned_long_long_fract_type_node
9585 : long_long_fract_type_node;
9586 }
9587
9588 return make_fract_type (size, unsignedp, satp);
9589 }
9590
9591 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9592
9593 static tree
9594 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9595 {
9596 if (satp)
9597 {
9598 if (size == SHORT_ACCUM_TYPE_SIZE)
9599 return unsignedp ? sat_unsigned_short_accum_type_node
9600 : sat_short_accum_type_node;
9601 if (size == ACCUM_TYPE_SIZE)
9602 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9603 if (size == LONG_ACCUM_TYPE_SIZE)
9604 return unsignedp ? sat_unsigned_long_accum_type_node
9605 : sat_long_accum_type_node;
9606 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9607 return unsignedp ? sat_unsigned_long_long_accum_type_node
9608 : sat_long_long_accum_type_node;
9609 }
9610 else
9611 {
9612 if (size == SHORT_ACCUM_TYPE_SIZE)
9613 return unsignedp ? unsigned_short_accum_type_node
9614 : short_accum_type_node;
9615 if (size == ACCUM_TYPE_SIZE)
9616 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9617 if (size == LONG_ACCUM_TYPE_SIZE)
9618 return unsignedp ? unsigned_long_accum_type_node
9619 : long_accum_type_node;
9620 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9621 return unsignedp ? unsigned_long_long_accum_type_node
9622 : long_long_accum_type_node;
9623 }
9624
9625 return make_accum_type (size, unsignedp, satp);
9626 }
9627
9628
9629 /* Create an atomic variant node for TYPE. This routine is called
9630 during initialization of data types to create the 5 basic atomic
9631 types. The generic build_qualified_type routine requires these to
9632 already be set up in order to function properly, so it cannot be
9633 used to create them. If ALIGN is non-zero, then the alignment is
9634 overridden to this value. */
9635
9636 static tree
9637 build_atomic_base (tree type, unsigned int align)
9638 {
9639 tree t;
9640
9641 /* Make sure it's not already registered. */
9642 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9643 return t;
9644
9645 t = build_variant_type_copy (type);
9646 set_type_quals (t, TYPE_QUAL_ATOMIC);
9647
9648 if (align)
9649 TYPE_ALIGN (t) = align;
9650
9651 return t;
9652 }
9653
9654 /* Create nodes for all integer types (and error_mark_node) using the sizes
9655 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9656 SHORT_DOUBLE specifies whether double should be of the same precision
9657 as float. */
9658
9659 void
9660 build_common_tree_nodes (bool signed_char, bool short_double)
9661 {
9662 int i;
9663
9664 error_mark_node = make_node (ERROR_MARK);
9665 TREE_TYPE (error_mark_node) = error_mark_node;
9666
9667 initialize_sizetypes ();
9668
9669 /* Define both `signed char' and `unsigned char'. */
9670 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9671 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9672 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9673 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9674
9675 /* Define `char', which is like either `signed char' or `unsigned char'
9676 but not the same as either. */
9677 char_type_node
9678 = (signed_char
9679 ? make_signed_type (CHAR_TYPE_SIZE)
9680 : make_unsigned_type (CHAR_TYPE_SIZE));
9681 TYPE_STRING_FLAG (char_type_node) = 1;
9682
9683 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9684 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9685 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9686 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9687 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9688 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9689 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9690 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9691
9692 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9693 {
9694 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9695 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9696 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9697 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9698
9699 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9700 && int_n_enabled_p[i])
9701 {
9702 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9703 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9704 }
9705 }
9706
9707 /* Define a boolean type. This type only represents boolean values but
9708 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9709 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9710 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9711 TYPE_PRECISION (boolean_type_node) = 1;
9712 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9713
9714 /* Define what type to use for size_t. */
9715 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9716 size_type_node = unsigned_type_node;
9717 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9718 size_type_node = long_unsigned_type_node;
9719 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9720 size_type_node = long_long_unsigned_type_node;
9721 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9722 size_type_node = short_unsigned_type_node;
9723 else
9724 {
9725 int i;
9726
9727 size_type_node = NULL_TREE;
9728 for (i = 0; i < NUM_INT_N_ENTS; i++)
9729 if (int_n_enabled_p[i])
9730 {
9731 char name[50];
9732 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9733
9734 if (strcmp (name, SIZE_TYPE) == 0)
9735 {
9736 size_type_node = int_n_trees[i].unsigned_type;
9737 }
9738 }
9739 if (size_type_node == NULL_TREE)
9740 gcc_unreachable ();
9741 }
9742
9743 /* Fill in the rest of the sized types. Reuse existing type nodes
9744 when possible. */
9745 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9746 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9747 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9748 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9749 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9750
9751 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9752 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9753 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9754 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9755 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9756
9757 /* Don't call build_qualified_type for atomics. That routine does
9758 special processing for atomics, and until they are initialized
9759 it's better not to make that call.
9760
9761 Check to see if there is a target override for atomic types. */
9762
9763 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9764 targetm.atomic_align_for_mode (QImode));
9765 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9766 targetm.atomic_align_for_mode (HImode));
9767 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9768 targetm.atomic_align_for_mode (SImode));
9769 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9770 targetm.atomic_align_for_mode (DImode));
9771 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9772 targetm.atomic_align_for_mode (TImode));
9773
9774 access_public_node = get_identifier ("public");
9775 access_protected_node = get_identifier ("protected");
9776 access_private_node = get_identifier ("private");
9777
9778 /* Define these next since types below may use them. */
9779 integer_zero_node = build_int_cst (integer_type_node, 0);
9780 integer_one_node = build_int_cst (integer_type_node, 1);
9781 integer_three_node = build_int_cst (integer_type_node, 3);
9782 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9783
9784 size_zero_node = size_int (0);
9785 size_one_node = size_int (1);
9786 bitsize_zero_node = bitsize_int (0);
9787 bitsize_one_node = bitsize_int (1);
9788 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9789
9790 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9791 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9792
9793 void_type_node = make_node (VOID_TYPE);
9794 layout_type (void_type_node);
9795
9796 pointer_bounds_type_node = targetm.chkp_bound_type ();
9797
9798 /* We are not going to have real types in C with less than byte alignment,
9799 so we might as well not have any types that claim to have it. */
9800 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9801 TYPE_USER_ALIGN (void_type_node) = 0;
9802
9803 void_node = make_node (VOID_CST);
9804 TREE_TYPE (void_node) = void_type_node;
9805
9806 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9807 layout_type (TREE_TYPE (null_pointer_node));
9808
9809 ptr_type_node = build_pointer_type (void_type_node);
9810 const_ptr_type_node
9811 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9812 fileptr_type_node = ptr_type_node;
9813
9814 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9815
9816 float_type_node = make_node (REAL_TYPE);
9817 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9818 layout_type (float_type_node);
9819
9820 double_type_node = make_node (REAL_TYPE);
9821 if (short_double)
9822 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9823 else
9824 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9825 layout_type (double_type_node);
9826
9827 long_double_type_node = make_node (REAL_TYPE);
9828 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9829 layout_type (long_double_type_node);
9830
9831 float_ptr_type_node = build_pointer_type (float_type_node);
9832 double_ptr_type_node = build_pointer_type (double_type_node);
9833 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9834 integer_ptr_type_node = build_pointer_type (integer_type_node);
9835
9836 /* Fixed size integer types. */
9837 uint16_type_node = make_or_reuse_type (16, 1);
9838 uint32_type_node = make_or_reuse_type (32, 1);
9839 uint64_type_node = make_or_reuse_type (64, 1);
9840
9841 /* Decimal float types. */
9842 dfloat32_type_node = make_node (REAL_TYPE);
9843 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9844 layout_type (dfloat32_type_node);
9845 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9846 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9847
9848 dfloat64_type_node = make_node (REAL_TYPE);
9849 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9850 layout_type (dfloat64_type_node);
9851 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9852 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9853
9854 dfloat128_type_node = make_node (REAL_TYPE);
9855 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9856 layout_type (dfloat128_type_node);
9857 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9858 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9859
9860 complex_integer_type_node = build_complex_type (integer_type_node);
9861 complex_float_type_node = build_complex_type (float_type_node);
9862 complex_double_type_node = build_complex_type (double_type_node);
9863 complex_long_double_type_node = build_complex_type (long_double_type_node);
9864
9865 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9866 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9867 sat_ ## KIND ## _type_node = \
9868 make_sat_signed_ ## KIND ## _type (SIZE); \
9869 sat_unsigned_ ## KIND ## _type_node = \
9870 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9871 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9872 unsigned_ ## KIND ## _type_node = \
9873 make_unsigned_ ## KIND ## _type (SIZE);
9874
9875 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9876 sat_ ## WIDTH ## KIND ## _type_node = \
9877 make_sat_signed_ ## KIND ## _type (SIZE); \
9878 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9879 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9880 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9881 unsigned_ ## WIDTH ## KIND ## _type_node = \
9882 make_unsigned_ ## KIND ## _type (SIZE);
9883
9884 /* Make fixed-point type nodes based on four different widths. */
9885 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9886 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9887 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9888 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9889 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9890
9891 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9892 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9893 NAME ## _type_node = \
9894 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9895 u ## NAME ## _type_node = \
9896 make_or_reuse_unsigned_ ## KIND ## _type \
9897 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9898 sat_ ## NAME ## _type_node = \
9899 make_or_reuse_sat_signed_ ## KIND ## _type \
9900 (GET_MODE_BITSIZE (MODE ## mode)); \
9901 sat_u ## NAME ## _type_node = \
9902 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9903 (GET_MODE_BITSIZE (U ## MODE ## mode));
9904
9905 /* Fixed-point type and mode nodes. */
9906 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9907 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9908 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9909 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9910 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9911 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9912 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9913 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9914 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9915 MAKE_FIXED_MODE_NODE (accum, da, DA)
9916 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9917
9918 {
9919 tree t = targetm.build_builtin_va_list ();
9920
9921 /* Many back-ends define record types without setting TYPE_NAME.
9922 If we copied the record type here, we'd keep the original
9923 record type without a name. This breaks name mangling. So,
9924 don't copy record types and let c_common_nodes_and_builtins()
9925 declare the type to be __builtin_va_list. */
9926 if (TREE_CODE (t) != RECORD_TYPE)
9927 t = build_variant_type_copy (t);
9928
9929 va_list_type_node = t;
9930 }
9931 }
9932
9933 /* Modify DECL for given flags.
9934 TM_PURE attribute is set only on types, so the function will modify
9935 DECL's type when ECF_TM_PURE is used. */
9936
9937 void
9938 set_call_expr_flags (tree decl, int flags)
9939 {
9940 if (flags & ECF_NOTHROW)
9941 TREE_NOTHROW (decl) = 1;
9942 if (flags & ECF_CONST)
9943 TREE_READONLY (decl) = 1;
9944 if (flags & ECF_PURE)
9945 DECL_PURE_P (decl) = 1;
9946 if (flags & ECF_LOOPING_CONST_OR_PURE)
9947 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9948 if (flags & ECF_NOVOPS)
9949 DECL_IS_NOVOPS (decl) = 1;
9950 if (flags & ECF_NORETURN)
9951 TREE_THIS_VOLATILE (decl) = 1;
9952 if (flags & ECF_MALLOC)
9953 DECL_IS_MALLOC (decl) = 1;
9954 if (flags & ECF_RETURNS_TWICE)
9955 DECL_IS_RETURNS_TWICE (decl) = 1;
9956 if (flags & ECF_LEAF)
9957 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9958 NULL, DECL_ATTRIBUTES (decl));
9959 if ((flags & ECF_TM_PURE) && flag_tm)
9960 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9961 /* Looping const or pure is implied by noreturn.
9962 There is currently no way to declare looping const or looping pure alone. */
9963 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9964 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9965 }
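
/* Illustrative example, not part of the original source: for some
   hypothetical function declaration DECL, a call such as

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF | ECF_CONST);

   sets TREE_NOTHROW and TREE_READONLY on the decl and chains a "leaf"
   attribute onto DECL_ATTRIBUTES, matching the handling above.  */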
9966
9967
9968 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9969
9970 static void
9971 local_define_builtin (const char *name, tree type, enum built_in_function code,
9972 const char *library_name, int ecf_flags)
9973 {
9974 tree decl;
9975
9976 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9977 library_name, NULL_TREE);
9978 set_call_expr_flags (decl, ecf_flags);
9979
9980 set_builtin_decl (code, decl, true);
9981 }
9982
9983 /* Call this function after instantiating all builtins that the language
9984 front end cares about. This will build the rest of the builtins
9985 and internal functions that are relied upon by the tree optimizers and
9986 the middle-end. */
9987
9988 void
9989 build_common_builtin_nodes (void)
9990 {
9991 tree tmp, ftype;
9992 int ecf_flags;
9993
9994 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9995 {
9996 ftype = build_function_type (void_type_node, void_list_node);
9997 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9998 "__builtin_unreachable",
9999 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10000 | ECF_CONST);
10001 }
10002
10003 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10004 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10005 {
10006 ftype = build_function_type_list (ptr_type_node,
10007 ptr_type_node, const_ptr_type_node,
10008 size_type_node, NULL_TREE);
10009
10010 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10011 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10012 "memcpy", ECF_NOTHROW | ECF_LEAF);
10013 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10014 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10015 "memmove", ECF_NOTHROW | ECF_LEAF);
10016 }
10017
10018 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10019 {
10020 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10021 const_ptr_type_node, size_type_node,
10022 NULL_TREE);
10023 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10024 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10025 }
10026
10027 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10028 {
10029 ftype = build_function_type_list (ptr_type_node,
10030 ptr_type_node, integer_type_node,
10031 size_type_node, NULL_TREE);
10032 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10033 "memset", ECF_NOTHROW | ECF_LEAF);
10034 }
10035
10036 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10037 {
10038 ftype = build_function_type_list (ptr_type_node,
10039 size_type_node, NULL_TREE);
10040 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10041 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10042 }
10043
10044 ftype = build_function_type_list (ptr_type_node, size_type_node,
10045 size_type_node, NULL_TREE);
10046 local_define_builtin ("__builtin_alloca_with_align", ftype,
10047 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10048 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10049
10050 /* If we're checking the stack, `alloca' can throw. */
10051 if (flag_stack_check)
10052 {
10053 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10054 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10055 }
10056
10057 ftype = build_function_type_list (void_type_node,
10058 ptr_type_node, ptr_type_node,
10059 ptr_type_node, NULL_TREE);
10060 local_define_builtin ("__builtin_init_trampoline", ftype,
10061 BUILT_IN_INIT_TRAMPOLINE,
10062 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10063 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10064 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10065 "__builtin_init_heap_trampoline",
10066 ECF_NOTHROW | ECF_LEAF);
10067
10068 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10069 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10070 BUILT_IN_ADJUST_TRAMPOLINE,
10071 "__builtin_adjust_trampoline",
10072 ECF_CONST | ECF_NOTHROW);
10073
10074 ftype = build_function_type_list (void_type_node,
10075 ptr_type_node, ptr_type_node, NULL_TREE);
10076 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10077 BUILT_IN_NONLOCAL_GOTO,
10078 "__builtin_nonlocal_goto",
10079 ECF_NORETURN | ECF_NOTHROW);
10080
10081 ftype = build_function_type_list (void_type_node,
10082 ptr_type_node, ptr_type_node, NULL_TREE);
10083 local_define_builtin ("__builtin_setjmp_setup", ftype,
10084 BUILT_IN_SETJMP_SETUP,
10085 "__builtin_setjmp_setup", ECF_NOTHROW);
10086
10087 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10088 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10089 BUILT_IN_SETJMP_RECEIVER,
10090 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10091
10092 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10093 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10094 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10095
10096 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10097 local_define_builtin ("__builtin_stack_restore", ftype,
10098 BUILT_IN_STACK_RESTORE,
10099 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10100
10101 /* If there's a possibility that we might use the ARM EABI, build the
10102 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10103 if (targetm.arm_eabi_unwinder)
10104 {
10105 ftype = build_function_type_list (void_type_node, NULL_TREE);
10106 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10107 BUILT_IN_CXA_END_CLEANUP,
10108 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10109 }
10110
10111 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10112 local_define_builtin ("__builtin_unwind_resume", ftype,
10113 BUILT_IN_UNWIND_RESUME,
10114 ((targetm_common.except_unwind_info (&global_options)
10115 == UI_SJLJ)
10116 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10117 ECF_NORETURN);
10118
10119 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10120 {
10121 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10122 NULL_TREE);
10123 local_define_builtin ("__builtin_return_address", ftype,
10124 BUILT_IN_RETURN_ADDRESS,
10125 "__builtin_return_address",
10126 ECF_NOTHROW);
10127 }
10128
10129 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10130 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10131 {
10132 ftype = build_function_type_list (void_type_node, ptr_type_node,
10133 ptr_type_node, NULL_TREE);
10134 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10135 local_define_builtin ("__cyg_profile_func_enter", ftype,
10136 BUILT_IN_PROFILE_FUNC_ENTER,
10137 "__cyg_profile_func_enter", 0);
10138 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10139 local_define_builtin ("__cyg_profile_func_exit", ftype,
10140 BUILT_IN_PROFILE_FUNC_EXIT,
10141 "__cyg_profile_func_exit", 0);
10142 }
10143
10144 /* The exception object and filter values from the runtime. The argument
10145 must be zero before exception lowering, i.e. from the front end. After
10146 exception lowering, it will be the region number for the exception
10147 landing pad. These functions are PURE instead of CONST to prevent
10148 them from being hoisted past the exception edge that will initialize
10149 its value in the landing pad. */
10150 ftype = build_function_type_list (ptr_type_node,
10151 integer_type_node, NULL_TREE);
10152 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10153 /* Only use TM_PURE if we have TM language support. */
10154 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10155 ecf_flags |= ECF_TM_PURE;
10156 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10157 "__builtin_eh_pointer", ecf_flags);
10158
10159 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10160 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10161 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10162 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10163
10164 ftype = build_function_type_list (void_type_node,
10165 integer_type_node, integer_type_node,
10166 NULL_TREE);
10167 local_define_builtin ("__builtin_eh_copy_values", ftype,
10168 BUILT_IN_EH_COPY_VALUES,
10169 "__builtin_eh_copy_values", ECF_NOTHROW);
10170
10171 /* Complex multiplication and division. These are handled as builtins
10172 rather than optabs because emit_library_call_value doesn't support
10173 complex. Further, we can do slightly better with folding these
10174 beasties if the real and imaginary parts of the arguments are separate. */
10175 {
10176 int mode;
10177
10178 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10179 {
10180 char mode_name_buf[4], *q;
10181 const char *p;
10182 enum built_in_function mcode, dcode;
10183 tree type, inner_type;
10184 const char *prefix = "__";
10185
10186 if (targetm.libfunc_gnu_prefix)
10187 prefix = "__gnu_";
10188
10189 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10190 if (type == NULL)
10191 continue;
10192 inner_type = TREE_TYPE (type);
10193
10194 ftype = build_function_type_list (type, inner_type, inner_type,
10195 inner_type, inner_type, NULL_TREE);
10196
10197 mcode = ((enum built_in_function)
10198 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10199 dcode = ((enum built_in_function)
10200 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10201
10202 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10203 *q = TOLOWER (*p);
10204 *q = '\0';
10205
10206 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10207 NULL);
10208 local_define_builtin (built_in_names[mcode], ftype, mcode,
10209 built_in_names[mcode],
10210 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10211
10212 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10213 NULL);
10214 local_define_builtin (built_in_names[dcode], ftype, dcode,
10215 built_in_names[dcode],
10216 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10217 }
10218 }
10219
10220 init_internal_fns ();
10221 }
10222
10223 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10224 better way.
10225
10226 If we requested a pointer to a vector, build up the pointers that
10227 we stripped off while looking for the inner type. Similarly for
10228 return values from functions.
10229
10230 The argument TYPE is the top of the chain, and BOTTOM is the
10231 new type which we will point to. */
10232
10233 tree
10234 reconstruct_complex_type (tree type, tree bottom)
10235 {
10236 tree inner, outer;
10237
10238 if (TREE_CODE (type) == POINTER_TYPE)
10239 {
10240 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10241 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10242 TYPE_REF_CAN_ALIAS_ALL (type));
10243 }
10244 else if (TREE_CODE (type) == REFERENCE_TYPE)
10245 {
10246 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10247 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10248 TYPE_REF_CAN_ALIAS_ALL (type));
10249 }
10250 else if (TREE_CODE (type) == ARRAY_TYPE)
10251 {
10252 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10253 outer = build_array_type (inner, TYPE_DOMAIN (type));
10254 }
10255 else if (TREE_CODE (type) == FUNCTION_TYPE)
10256 {
10257 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10258 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10259 }
10260 else if (TREE_CODE (type) == METHOD_TYPE)
10261 {
10262 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10263 /* The build_method_type_directly() routine prepends 'this' to the
10264 argument list, so we must compensate by getting rid of it. */
10265 outer
10266 = build_method_type_directly
10267 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10268 inner,
10269 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10270 }
10271 else if (TREE_CODE (type) == OFFSET_TYPE)
10272 {
10273 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10274 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10275 }
10276 else
10277 return bottom;
10278
10279 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10280 TYPE_QUALS (type));
10281 }
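
/* Illustrative example, not part of the original source: if TYPE is a
   pointer type such as "float *" and BOTTOM is a vector type, e.g. the
   result of build_vector_type (float_type_node, 4), then

     reconstruct_complex_type (type, bottom)

   rebuilds the stripped-off POINTER_TYPE layer around the vector type,
   yielding a pointer to that vector while preserving TYPE's attributes
   and qualifiers via build_type_attribute_qual_variant.  */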
10282
10283 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10284 the inner type. */
10285 tree
10286 build_vector_type_for_mode (tree innertype, machine_mode mode)
10287 {
10288 int nunits;
10289
10290 switch (GET_MODE_CLASS (mode))
10291 {
10292 case MODE_VECTOR_INT:
10293 case MODE_VECTOR_FLOAT:
10294 case MODE_VECTOR_FRACT:
10295 case MODE_VECTOR_UFRACT:
10296 case MODE_VECTOR_ACCUM:
10297 case MODE_VECTOR_UACCUM:
10298 nunits = GET_MODE_NUNITS (mode);
10299 break;
10300
10301 case MODE_INT:
10302 /* Check that there are no leftover bits. */
10303 gcc_assert (GET_MODE_BITSIZE (mode)
10304 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10305
10306 nunits = GET_MODE_BITSIZE (mode)
10307 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10308 break;
10309
10310 default:
10311 gcc_unreachable ();
10312 }
10313
10314 return make_vector_type (innertype, nunits, mode);
10315 }
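
/* Illustrative example, not part of the original source and assuming a
   target that provides V4SImode: a call such as

     tree v4si = build_vector_type_for_mode (intSI_type_node, V4SImode);

   derives the four subparts from GET_MODE_NUNITS, whereas passing an
   integer mode such as TImode instead divides GET_MODE_BITSIZE by the
   size of the inner type.  */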
10316
10317 /* Similarly, but takes the inner type and number of units, which must be
10318 a power of two. */
10319
10320 tree
10321 build_vector_type (tree innertype, int nunits)
10322 {
10323 return make_vector_type (innertype, nunits, VOIDmode);
10324 }
10325
10326 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10327
10328 tree
10329 build_opaque_vector_type (tree innertype, int nunits)
10330 {
10331 tree t = make_vector_type (innertype, nunits, VOIDmode);
10332 tree cand;
10333 /* We always build the non-opaque variant before the opaque one,
10334 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10335 cand = TYPE_NEXT_VARIANT (t);
10336 if (cand
10337 && TYPE_VECTOR_OPAQUE (cand)
10338 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10339 return cand;
10340 /* Otherwise build a variant type and make sure to queue it after
10341 the non-opaque type. */
10342 cand = build_distinct_type_copy (t);
10343 TYPE_VECTOR_OPAQUE (cand) = true;
10344 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10345 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10346 TYPE_NEXT_VARIANT (t) = cand;
10347 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10348 return cand;
10349 }
10350
10351
10352 /* Given an initializer INIT, return TRUE if INIT is zero or some
10353 aggregate of zeros. Otherwise return FALSE. */
10354 bool
10355 initializer_zerop (const_tree init)
10356 {
10357 tree elt;
10358
10359 STRIP_NOPS (init);
10360
10361 switch (TREE_CODE (init))
10362 {
10363 case INTEGER_CST:
10364 return integer_zerop (init);
10365
10366 case REAL_CST:
10367 /* ??? Note that this is not correct for C4X float formats. There,
10368 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10369 negative exponent. */
10370 return real_zerop (init)
10371 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10372
10373 case FIXED_CST:
10374 return fixed_zerop (init);
10375
10376 case COMPLEX_CST:
10377 return integer_zerop (init)
10378 || (real_zerop (init)
10379 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10380 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10381
10382 case VECTOR_CST:
10383 {
10384 unsigned i;
10385 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10386 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10387 return false;
10388 return true;
10389 }
10390
10391 case CONSTRUCTOR:
10392 {
10393 unsigned HOST_WIDE_INT idx;
10394
10395 if (TREE_CLOBBER_P (init))
10396 return false;
10397 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10398 if (!initializer_zerop (elt))
10399 return false;
10400 return true;
10401 }
10402
10403 case STRING_CST:
10404 {
10405 int i;
10406
10407 /* We need to loop through all elements to handle cases like
10408 "\0" and "\0foobar". */
10409 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10410 if (TREE_STRING_POINTER (init)[i] != '\0')
10411 return false;
10412
10413 return true;
10414 }
10415
10416 default:
10417 return false;
10418 }
10419 }
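
/* Illustrative example, not part of the original source: a CONSTRUCTOR
   whose elements are integer_zero_node and an all-'\0' STRING_CST
   satisfies initializer_zerop, so a caller can test

     if (initializer_zerop (DECL_INITIAL (decl)))

   to treat the initializer as zero-filled; a REAL_CST holding -0.0 is
   rejected by the REAL_VALUE_MINUS_ZERO check above.  */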
10420
10421 /* Check if vector VEC consists entirely of equal elements and
10422 that the number of elements corresponds to the type of VEC.
10423 The function returns the first element of the vector
10424 or NULL_TREE if the vector is not uniform.
10425 tree
10426 uniform_vector_p (const_tree vec)
10427 {
10428 tree first, t;
10429 unsigned i;
10430
10431 if (vec == NULL_TREE)
10432 return NULL_TREE;
10433
10434 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10435
10436 if (TREE_CODE (vec) == VECTOR_CST)
10437 {
10438 first = VECTOR_CST_ELT (vec, 0);
10439 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10440 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10441 return NULL_TREE;
10442
10443 return first;
10444 }
10445
10446 else if (TREE_CODE (vec) == CONSTRUCTOR)
10447 {
10448 first = error_mark_node;
10449
10450 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10451 {
10452 if (i == 0)
10453 {
10454 first = t;
10455 continue;
10456 }
10457 if (!operand_equal_p (first, t, 0))
10458 return NULL_TREE;
10459 }
10460 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10461 return NULL_TREE;
10462
10463 return first;
10464 }
10465
10466 return NULL_TREE;
10467 }
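
/* Illustrative example, not part of the original source: for a
   VECTOR_CST whose four elements are all build_int_cst (intSI_type_node, 7),
   uniform_vector_p returns that element, while a CONSTRUCTOR with
   elements 7, 7, 7, 0 yields NULL_TREE because the last element
   differs.  */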
10468
10469 /* Build an empty statement at location LOC. */
10470
10471 tree
10472 build_empty_stmt (location_t loc)
10473 {
10474 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10475 SET_EXPR_LOCATION (t, loc);
10476 return t;
10477 }
10478
10479
10480 /* Build an OpenMP clause with code CODE. LOC is the location of the
10481 clause. */
10482
10483 tree
10484 build_omp_clause (location_t loc, enum omp_clause_code code)
10485 {
10486 tree t;
10487 int size, length;
10488
10489 length = omp_clause_num_ops[code];
10490 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10491
10492 record_node_allocation_statistics (OMP_CLAUSE, size);
10493
10494 t = (tree) ggc_internal_alloc (size);
10495 memset (t, 0, size);
10496 TREE_SET_CODE (t, OMP_CLAUSE);
10497 OMP_CLAUSE_SET_CODE (t, code);
10498 OMP_CLAUSE_LOCATION (t) = loc;
10499
10500 return t;
10501 }
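
/* Illustrative sketch, not part of the original source: building a
   private clause at some location LOC for a hypothetical decl VAR and
   prepending it to an existing clause chain CLAUSES might look like

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;

   since OMP_CLAUSE_PRIVATE carries a single operand holding the decl.  */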
10502
10503 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10504 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10505 Except for the CODE and operand count field, other storage for the
10506 object is initialized to zeros. */
10507
10508 tree
10509 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10510 {
10511 tree t;
10512 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10513
10514 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10515 gcc_assert (len >= 1);
10516
10517 record_node_allocation_statistics (code, length);
10518
10519 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10520
10521 TREE_SET_CODE (t, code);
10522
10523 /* Can't use TREE_OPERAND to store the length because if checking is
10524 enabled, it will try to check the length before we store it. :-P */
10525 t->exp.operands[0] = build_int_cst (sizetype, len);
10526
10527 return t;
10528 }
10529
10530 /* Helper function for build_call_* functions; build a CALL_EXPR with
10531 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10532 the argument slots. */
10533
10534 static tree
10535 build_call_1 (tree return_type, tree fn, int nargs)
10536 {
10537 tree t;
10538
10539 t = build_vl_exp (CALL_EXPR, nargs + 3);
10540 TREE_TYPE (t) = return_type;
10541 CALL_EXPR_FN (t) = fn;
10542 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10543
10544 return t;
10545 }
10546
10547 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10548 FN and a null static chain slot. NARGS is the number of call arguments
10549 which are specified as "..." arguments. */
10550
10551 tree
10552 build_call_nary (tree return_type, tree fn, int nargs, ...)
10553 {
10554 tree ret;
10555 va_list args;
10556 va_start (args, nargs);
10557 ret = build_call_valist (return_type, fn, nargs, args);
10558 va_end (args);
10559 return ret;
10560 }
10561
10562 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10563 FN and a null static chain slot. NARGS is the number of call arguments
10564 which are specified as a va_list ARGS. */
10565
10566 tree
10567 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10568 {
10569 tree t;
10570 int i;
10571
10572 t = build_call_1 (return_type, fn, nargs);
10573 for (i = 0; i < nargs; i++)
10574 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10575 process_call_operands (t);
10576 return t;
10577 }
10578
10579 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10580 FN and a null static chain slot. NARGS is the number of call arguments
10581 which are specified as a tree array ARGS. */
10582
10583 tree
10584 build_call_array_loc (location_t loc, tree return_type, tree fn,
10585 int nargs, const tree *args)
10586 {
10587 tree t;
10588 int i;
10589
10590 t = build_call_1 (return_type, fn, nargs);
10591 for (i = 0; i < nargs; i++)
10592 CALL_EXPR_ARG (t, i) = args[i];
10593 process_call_operands (t);
10594 SET_EXPR_LOCATION (t, loc);
10595 return t;
10596 }
10597
10598 /* Like build_call_array, but takes a vec. */
10599
10600 tree
10601 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10602 {
10603 tree ret, t;
10604 unsigned int ix;
10605
10606 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10607 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10608 CALL_EXPR_ARG (ret, ix) = t;
10609 process_call_operands (ret);
10610 return ret;
10611 }
10612
10613 /* Conveniently construct a function call expression. FNDECL names the
10614 function to be called and N arguments are passed in the array
10615 ARGARRAY. */
10616
10617 tree
10618 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10619 {
10620 tree fntype = TREE_TYPE (fndecl);
10621 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10622
10623 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10624 }
10625
10626 /* Conveniently construct a function call expression. FNDECL names the
10627 function to be called and the arguments are passed in the vector
10628 VEC. */
10629
10630 tree
10631 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10632 {
10633 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10634 vec_safe_address (vec));
10635 }
10636
10637
10638 /* Conveniently construct a function call expression. FNDECL names the
10639 function to be called, N is the number of arguments, and the "..."
10640 parameters are the argument expressions. */
10641
10642 tree
10643 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10644 {
10645 va_list ap;
10646 tree *argarray = XALLOCAVEC (tree, n);
10647 int i;
10648
10649 va_start (ap, n);
10650 for (i = 0; i < n; i++)
10651 argarray[i] = va_arg (ap, tree);
10652 va_end (ap);
10653 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10654 }
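
/* Illustrative sketch, not part of the original source: building a call
   to the memcpy builtin at location LOC might look like

     tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr_loc (loc, fndecl, 3, dst, src, len);

   where DST, SRC and LEN are hypothetical argument trees; the result is
   a (possibly folded) CALL_EXPR with the builtin's return type.  */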
10655
10656 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10657 varargs macros aren't supported by all bootstrap compilers. */
10658
10659 tree
10660 build_call_expr (tree fndecl, int n, ...)
10661 {
10662 va_list ap;
10663 tree *argarray = XALLOCAVEC (tree, n);
10664 int i;
10665
10666 va_start (ap, n);
10667 for (i = 0; i < n; i++)
10668 argarray[i] = va_arg (ap, tree);
10669 va_end (ap);
10670 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10671 }
10672
10673 /* Build an internal call expression. This is just like a CALL_EXPR, except
10674 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10675 internal function call. */
10676
10677 tree
10678 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10679 tree type, int n, ...)
10680 {
10681 va_list ap;
10682 int i;
10683
10684 tree fn = build_call_1 (type, NULL_TREE, n);
10685 va_start (ap, n);
10686 for (i = 0; i < n; i++)
10687 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10688 va_end (ap);
10689 SET_EXPR_LOCATION (fn, loc);
10690 CALL_EXPR_IFN (fn) = ifn;
10691 return fn;
10692 }
10693
10694 /* Create a new constant string literal and return a char* pointer to it.
10695 The STRING_CST value is the LEN characters at STR. */
10696 tree
10697 build_string_literal (int len, const char *str)
10698 {
10699 tree t, elem, index, type;
10700
10701 t = build_string (len, str);
10702 elem = build_type_variant (char_type_node, 1, 0);
10703 index = build_index_type (size_int (len - 1));
10704 type = build_array_type (elem, index);
10705 TREE_TYPE (t) = type;
10706 TREE_CONSTANT (t) = 1;
10707 TREE_READONLY (t) = 1;
10708 TREE_STATIC (t) = 1;
10709
10710 type = build_pointer_type (elem);
10711 t = build1 (ADDR_EXPR, type,
10712 build4 (ARRAY_REF, elem,
10713 t, integer_zero_node, NULL_TREE, NULL_TREE));
10714 return t;
10715 }
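
/* Illustrative sketch, not part of the original source: the returned
   ADDR_EXPR can be used directly as a call argument, e.g.

     tree msg = build_string_literal (strlen ("hi") + 1, "hi");
     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_PUTS),
                                  1, msg);

   which passes the address of the constant "hi" array (including its
   terminating NUL) to puts.  */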
10716
10717
10718
10719 /* Return true if T (assumed to be a DECL) must be assigned a memory
10720 location. */
10721
10722 bool
10723 needs_to_live_in_memory (const_tree t)
10724 {
10725 return (TREE_ADDRESSABLE (t)
10726 || is_global_var (t)
10727 || (TREE_CODE (t) == RESULT_DECL
10728 && !DECL_BY_REFERENCE (t)
10729 && aggregate_value_p (t, current_function_decl)));
10730 }
10731
10732 /* Return the value of the constant X, sign-extended. */
10733
10734 HOST_WIDE_INT
10735 int_cst_value (const_tree x)
10736 {
10737 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10738 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10739
10740 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10741 gcc_assert (cst_and_fits_in_hwi (x));
10742
10743 if (bits < HOST_BITS_PER_WIDE_INT)
10744 {
10745 bool negative = ((val >> (bits - 1)) & 1) != 0;
10746 if (negative)
10747 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10748 else
10749 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10750 }
10751
10752 return val;
10753 }
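
/* Illustrative example, not part of the original source: an INTEGER_CST
   of an 8-bit signed type holding -1 keeps 0xff in its low bits; the
   code above re-extends from bit 7, so int_cst_value returns -1 rather
   than 255.  */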
10754
10755 /* If TYPE is an integral or pointer type, return an integer type with
10756 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10757 if TYPE is already an integer type of signedness UNSIGNEDP. */
10758
10759 tree
10760 signed_or_unsigned_type_for (int unsignedp, tree type)
10761 {
10762 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10763 return type;
10764
10765 if (TREE_CODE (type) == VECTOR_TYPE)
10766 {
10767 tree inner = TREE_TYPE (type);
10768 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10769 if (!inner2)
10770 return NULL_TREE;
10771 if (inner == inner2)
10772 return type;
10773 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10774 }
10775
10776 if (!INTEGRAL_TYPE_P (type)
10777 && !POINTER_TYPE_P (type)
10778 && TREE_CODE (type) != OFFSET_TYPE)
10779 return NULL_TREE;
10780
10781 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10782 }
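
/* Illustrative example, not part of the original source: a call such as

     tree utype = signed_or_unsigned_type_for (1, long_integer_type_node);

   yields an unsigned integer type with the precision of "long" via
   build_nonstandard_integer_type, a VECTOR_TYPE is converted
   element-wise, and anything that is not integral, pointer or offset
   comes back as NULL_TREE.  */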
10783
10784 /* If TYPE is an integral or pointer type, return an integer type with
10785 the same precision which is unsigned, or itself if TYPE is already an
10786 unsigned integer type. */
10787
10788 tree
10789 unsigned_type_for (tree type)
10790 {
10791 return signed_or_unsigned_type_for (1, type);
10792 }
10793
10794 /* If TYPE is an integral or pointer type, return an integer type with
10795 the same precision which is signed, or itself if TYPE is already a
10796 signed integer type. */
10797
10798 tree
10799 signed_type_for (tree type)
10800 {
10801 return signed_or_unsigned_type_for (0, type);
10802 }
10803
10804 /* If TYPE is a vector type, return a signed integer vector type with the
10805 same width and number of subparts. Otherwise return boolean_type_node. */
10806
10807 tree
10808 truth_type_for (tree type)
10809 {
10810 if (TREE_CODE (type) == VECTOR_TYPE)
10811 {
10812 tree elem = lang_hooks.types.type_for_size
10813 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10814 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10815 }
10816 else
10817 return boolean_type_node;
10818 }
10819
10820 /* Returns the largest value obtainable by casting something in INNER type to
10821 OUTER type. */
10822
10823 tree
10824 upper_bound_in_type (tree outer, tree inner)
10825 {
10826 unsigned int det = 0;
10827 unsigned oprec = TYPE_PRECISION (outer);
10828 unsigned iprec = TYPE_PRECISION (inner);
10829 unsigned prec;
10830
10831 /* Compute a unique number for every combination. */
10832 det |= (oprec > iprec) ? 4 : 0;
10833 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10834 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10835
10836 /* Determine the exponent to use. */
10837 switch (det)
10838 {
10839 case 0:
10840 case 1:
10841 /* oprec <= iprec, outer: signed, inner: don't care. */
10842 prec = oprec - 1;
10843 break;
10844 case 2:
10845 case 3:
10846 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10847 prec = oprec;
10848 break;
10849 case 4:
10850 /* oprec > iprec, outer: signed, inner: signed. */
10851 prec = iprec - 1;
10852 break;
10853 case 5:
10854 /* oprec > iprec, outer: signed, inner: unsigned. */
10855 prec = iprec;
10856 break;
10857 case 6:
10858 /* oprec > iprec, outer: unsigned, inner: signed. */
10859 prec = oprec;
10860 break;
10861 case 7:
10862 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10863 prec = iprec;
10864 break;
10865 default:
10866 gcc_unreachable ();
10867 }
10868
10869 return wide_int_to_tree (outer,
10870 wi::mask (prec, false, TYPE_PRECISION (outer)));
10871 }
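
/* Illustrative example, not part of the original source: with a 16-bit
   signed OUTER and a 32-bit signed INNER the switch above selects
   prec = oprec - 1 = 15, giving 32767; with a 32-bit unsigned OUTER and
   an 8-bit signed INNER it selects prec = oprec, since casting a
   negative value yields an all-ones pattern, i.e. 0xffffffff.  */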
10872
10873 /* Returns the smallest value obtainable by casting something in INNER type to
10874 OUTER type. */
10875
10876 tree
10877 lower_bound_in_type (tree outer, tree inner)
10878 {
10879 unsigned oprec = TYPE_PRECISION (outer);
10880 unsigned iprec = TYPE_PRECISION (inner);
10881
10882 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10883 and obtain 0. */
10884 if (TYPE_UNSIGNED (outer)
10885 /* If we are widening something of an unsigned type, OUTER type
10886 contains all values of INNER type. In particular, both INNER
10887 and OUTER types have zero in common. */
10888 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10889 return build_int_cst (outer, 0);
10890 else
10891 {
10892 /* If we are widening a signed type to another signed type, we
10893 want to obtain -2^(iprec-1). If we are keeping the
10894 precision or narrowing to a signed type, we want to obtain
10895 -2^(oprec-1). */
10896 unsigned prec = oprec > iprec ? iprec : oprec;
10897 return wide_int_to_tree (outer,
10898 wi::mask (prec - 1, true,
10899 TYPE_PRECISION (outer)));
10900 }
10901 }
10902
10903 /* Return nonzero if two operands that are suitable for PHI nodes are
10904 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10905 SSA_NAME or invariant. Note that this is strictly an optimization.
10906 That is, callers of this function can directly call operand_equal_p
10907 and get the same result, only slower. */
10908
10909 int
10910 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10911 {
10912 if (arg0 == arg1)
10913 return 1;
10914 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10915 return 0;
10916 return operand_equal_p (arg0, arg1, 0);
10917 }
10918
10919 /* Returns the number of zeros at the end of the binary representation of X. */
10920
10921 tree
10922 num_ending_zeros (const_tree x)
10923 {
10924 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10925 }
10926
10927
10928 #define WALK_SUBTREE(NODE) \
10929 do \
10930 { \
10931 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10932 if (result) \
10933 return result; \
10934 } \
10935 while (0)
10936
10937 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
10938 be walked whenever a type is seen in the tree. The rest of the operands and
10939 the return value are as for walk_tree. */
10940
10941 static tree
10942 walk_type_fields (tree type, walk_tree_fn func, void *data,
10943 hash_set<tree> *pset, walk_tree_lh lh)
10944 {
10945 tree result = NULL_TREE;
10946
10947 switch (TREE_CODE (type))
10948 {
10949 case POINTER_TYPE:
10950 case REFERENCE_TYPE:
10951 case VECTOR_TYPE:
10952 /* We have to worry about mutually recursive pointers. These can't
10953 be written in C. They can in Ada. It's pathological, but
10954 there's an ACATS test (c38102a) that checks it. Deal with this
10955 by checking if we're pointing to another pointer, that one
10956 points to another pointer, that one does too, and we have no htab.
10957 If so, get a hash table. We check three levels deep to avoid
10958 the cost of the hash table if we don't need one. */
10959 if (POINTER_TYPE_P (TREE_TYPE (type))
10960 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10961 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10962 && !pset)
10963 {
10964 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10965 func, data);
10966 if (result)
10967 return result;
10968
10969 break;
10970 }
10971
10972 /* ... fall through ... */
10973
10974 case COMPLEX_TYPE:
10975 WALK_SUBTREE (TREE_TYPE (type));
10976 break;
10977
10978 case METHOD_TYPE:
10979 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10980
10981 /* Fall through. */
10982
10983 case FUNCTION_TYPE:
10984 WALK_SUBTREE (TREE_TYPE (type));
10985 {
10986 tree arg;
10987
10988 /* We never want to walk into default arguments. */
10989 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10990 WALK_SUBTREE (TREE_VALUE (arg));
10991 }
10992 break;
10993
10994 case ARRAY_TYPE:
10995 /* Don't follow this node's type if it is a pointer, for fear that
10996 we'll have infinite recursion. If we have a PSET, then we
10997 need not fear. */
10998 if (pset
10999 || (!POINTER_TYPE_P (TREE_TYPE (type))
11000 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11001 WALK_SUBTREE (TREE_TYPE (type));
11002 WALK_SUBTREE (TYPE_DOMAIN (type));
11003 break;
11004
11005 case OFFSET_TYPE:
11006 WALK_SUBTREE (TREE_TYPE (type));
11007 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11008 break;
11009
11010 default:
11011 break;
11012 }
11013
11014 return NULL_TREE;
11015 }
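
/* Illustrative sketch, not part of the original source: a minimal
   walk_tree_fn callback that stops at the first SSA_NAME it sees might
   look like

     static tree
     find_ssa_name_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         return *tp;
       return NULL_TREE;
     }

   and would be invoked as walk_tree (&expr, find_ssa_name_r, NULL, NULL);
   the walk below stops as soon as the callback returns non-NULL.  */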
11016
11017 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11018 called with the DATA and the address of each sub-tree. If FUNC returns a
11019 non-NULL value, the traversal is stopped, and the value returned by FUNC
11020 is returned. If PSET is non-NULL it is used to record the nodes visited,
11021 and to avoid visiting a node more than once. */
11022
11023 tree
11024 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11025 hash_set<tree> *pset, walk_tree_lh lh)
11026 {
11027 enum tree_code code;
11028 int walk_subtrees;
11029 tree result;
11030
11031 #define WALK_SUBTREE_TAIL(NODE) \
11032 do \
11033 { \
11034 tp = & (NODE); \
11035 goto tail_recurse; \
11036 } \
11037 while (0)
11038
11039 tail_recurse:
11040 /* Skip empty subtrees. */
11041 if (!*tp)
11042 return NULL_TREE;
11043
11044 /* Don't walk the same tree twice, if the user has requested
11045 that we avoid doing so. */
11046 if (pset && pset->add (*tp))
11047 return NULL_TREE;
11048
11049 /* Call the function. */
11050 walk_subtrees = 1;
11051 result = (*func) (tp, &walk_subtrees, data);
11052
11053 /* If we found something, return it. */
11054 if (result)
11055 return result;
11056
11057 code = TREE_CODE (*tp);
11058
11059 /* Even if we didn't, FUNC may have decided that there was nothing
11060 interesting below this point in the tree. */
11061 if (!walk_subtrees)
11062 {
11063 /* But we still need to check our siblings. */
11064 if (code == TREE_LIST)
11065 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11066 else if (code == OMP_CLAUSE)
11067 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11068 else
11069 return NULL_TREE;
11070 }
11071
11072 if (lh)
11073 {
11074 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11075 if (result || !walk_subtrees)
11076 return result;
11077 }
11078
11079 switch (code)
11080 {
11081 case ERROR_MARK:
11082 case IDENTIFIER_NODE:
11083 case INTEGER_CST:
11084 case REAL_CST:
11085 case FIXED_CST:
11086 case VECTOR_CST:
11087 case STRING_CST:
11088 case BLOCK:
11089 case PLACEHOLDER_EXPR:
11090 case SSA_NAME:
11091 case FIELD_DECL:
11092 case RESULT_DECL:
11093 /* None of these have subtrees other than those already walked
11094 above. */
11095 break;
11096
11097 case TREE_LIST:
11098 WALK_SUBTREE (TREE_VALUE (*tp));
11099 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11100 break;
11101
11102 case TREE_VEC:
11103 {
11104 int len = TREE_VEC_LENGTH (*tp);
11105
11106 if (len == 0)
11107 break;
11108
11109 /* Walk all elements but the first. */
11110 while (--len)
11111 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11112
11113 /* Now walk the first one as a tail call. */
11114 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11115 }
11116
11117 case COMPLEX_CST:
11118 WALK_SUBTREE (TREE_REALPART (*tp));
11119 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11120
11121 case CONSTRUCTOR:
11122 {
11123 unsigned HOST_WIDE_INT idx;
11124 constructor_elt *ce;
11125
11126 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11127 idx++)
11128 WALK_SUBTREE (ce->value);
11129 }
11130 break;
11131
11132 case SAVE_EXPR:
11133 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11134
11135 case BIND_EXPR:
11136 {
11137 tree decl;
11138 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11139 {
11140 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11141 into declarations that are just mentioned, rather than
11142 declared; they don't really belong to this part of the tree.
11143 And, we can see cycles: the initializer for a declaration
11144 can refer to the declaration itself. */
11145 WALK_SUBTREE (DECL_INITIAL (decl));
11146 WALK_SUBTREE (DECL_SIZE (decl));
11147 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11148 }
11149 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11150 }
11151
11152 case STATEMENT_LIST:
11153 {
11154 tree_stmt_iterator i;
11155 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11156 WALK_SUBTREE (*tsi_stmt_ptr (i));
11157 }
11158 break;
11159
11160 case OMP_CLAUSE:
11161 switch (OMP_CLAUSE_CODE (*tp))
11162 {
11163 case OMP_CLAUSE_PRIVATE:
11164 case OMP_CLAUSE_SHARED:
11165 case OMP_CLAUSE_FIRSTPRIVATE:
11166 case OMP_CLAUSE_COPYIN:
11167 case OMP_CLAUSE_COPYPRIVATE:
11168 case OMP_CLAUSE_FINAL:
11169 case OMP_CLAUSE_IF:
11170 case OMP_CLAUSE_NUM_THREADS:
11171 case OMP_CLAUSE_SCHEDULE:
11172 case OMP_CLAUSE_UNIFORM:
11173 case OMP_CLAUSE_DEPEND:
11174 case OMP_CLAUSE_NUM_TEAMS:
11175 case OMP_CLAUSE_THREAD_LIMIT:
11176 case OMP_CLAUSE_DEVICE:
11177 case OMP_CLAUSE_DIST_SCHEDULE:
11178 case OMP_CLAUSE_SAFELEN:
11179 case OMP_CLAUSE_SIMDLEN:
11180 case OMP_CLAUSE__LOOPTEMP_:
11181 case OMP_CLAUSE__SIMDUID_:
11182 case OMP_CLAUSE__CILK_FOR_COUNT_:
11183 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11184 /* FALLTHRU */
11185
11186 case OMP_CLAUSE_NOWAIT:
11187 case OMP_CLAUSE_ORDERED:
11188 case OMP_CLAUSE_DEFAULT:
11189 case OMP_CLAUSE_UNTIED:
11190 case OMP_CLAUSE_MERGEABLE:
11191 case OMP_CLAUSE_PROC_BIND:
11192 case OMP_CLAUSE_INBRANCH:
11193 case OMP_CLAUSE_NOTINBRANCH:
11194 case OMP_CLAUSE_FOR:
11195 case OMP_CLAUSE_PARALLEL:
11196 case OMP_CLAUSE_SECTIONS:
11197 case OMP_CLAUSE_TASKGROUP:
11198 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11199
11200 case OMP_CLAUSE_LASTPRIVATE:
11201 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11202 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11203 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11204
11205 case OMP_CLAUSE_COLLAPSE:
11206 {
11207 int i;
11208 for (i = 0; i < 3; i++)
11209 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11210 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11211 }
11212
11213 case OMP_CLAUSE_LINEAR:
11214 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11215 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11216 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11217 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11218
11219 case OMP_CLAUSE_ALIGNED:
11220 case OMP_CLAUSE_FROM:
11221 case OMP_CLAUSE_TO:
11222 case OMP_CLAUSE_MAP:
11223 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11224 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11225 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11226
11227 case OMP_CLAUSE_REDUCTION:
11228 {
11229 int i;
11230 for (i = 0; i < 4; i++)
11231 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11232 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11233 }
11234
11235 default:
11236 gcc_unreachable ();
11237 }
11238 break;
11239
11240 case TARGET_EXPR:
11241 {
11242 int i, len;
11243
11244 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11245 But, we only want to walk once. */
11246 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11247 for (i = 0; i < len; ++i)
11248 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11249 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11250 }
11251
11252 case DECL_EXPR:
11253 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11254 defining. We only want to walk into these fields of a type in this
11255 case and not in the general case of a mere reference to the type.
11256
11257 The criterion is as follows: if the field can be an expression, it
11258 must be walked only here. This should be in keeping with the fields
11259 that are directly gimplified in gimplify_type_sizes in order for the
11260 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11261 variable-sized types.
11262
11263 Note that DECLs get walked as part of processing the BIND_EXPR. */
11264 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11265 {
11266 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11267 if (TREE_CODE (*type_p) == ERROR_MARK)
11268 return NULL_TREE;
11269
11270 /* Call the function for the type. See if it returns anything or
11271 doesn't want us to continue. If we are to continue, walk both
11272 the normal fields and those for the declaration case. */
11273 result = (*func) (type_p, &walk_subtrees, data);
11274 if (result || !walk_subtrees)
11275 return result;
11276
11277 /* But do not walk a pointed-to type since it may itself need to
11278 be walked in the declaration case if it isn't anonymous. */
11279 if (!POINTER_TYPE_P (*type_p))
11280 {
11281 result = walk_type_fields (*type_p, func, data, pset, lh);
11282 if (result)
11283 return result;
11284 }
11285
11286 /* If this is a record type, also walk the fields. */
11287 if (RECORD_OR_UNION_TYPE_P (*type_p))
11288 {
11289 tree field;
11290
11291 for (field = TYPE_FIELDS (*type_p); field;
11292 field = DECL_CHAIN (field))
11293 {
11294 /* We'd like to look at the type of the field, but we can
11295 easily get infinite recursion. So assume it's pointed
11296 to elsewhere in the tree. Also, ignore things that
11297 aren't fields. */
11298 if (TREE_CODE (field) != FIELD_DECL)
11299 continue;
11300
11301 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11302 WALK_SUBTREE (DECL_SIZE (field));
11303 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11304 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11305 WALK_SUBTREE (DECL_QUALIFIER (field));
11306 }
11307 }
11308
11309 /* Same for scalar types. */
11310 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11311 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11312 || TREE_CODE (*type_p) == INTEGER_TYPE
11313 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11314 || TREE_CODE (*type_p) == REAL_TYPE)
11315 {
11316 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11317 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11318 }
11319
11320 WALK_SUBTREE (TYPE_SIZE (*type_p));
11321 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11322 }
11323 /* FALLTHRU */
11324
11325 default:
11326 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11327 {
11328 int i, len;
11329
11330 /* Walk over all the sub-trees of this operand. */
11331 len = TREE_OPERAND_LENGTH (*tp);
11332
11333 /* Go through the subtrees. We need to do this in forward order so
11334 that the scope of a FOR_EXPR is handled properly. */
11335 if (len)
11336 {
11337 for (i = 0; i < len - 1; ++i)
11338 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11339 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11340 }
11341 }
11342 /* If this is a type, walk the needed fields in the type. */
11343 else if (TYPE_P (*tp))
11344 return walk_type_fields (*tp, func, data, pset, lh);
11345 break;
11346 }
11347
11348 /* We didn't find what we were looking for. */
11349 return NULL_TREE;
11350
11351 #undef WALK_SUBTREE_TAIL
11352 }
11353 #undef WALK_SUBTREE
11354
11355 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11356
11357 tree
11358 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11359 walk_tree_lh lh)
11360 {
11361 tree result;
11362
11363 hash_set<tree> pset;
11364 result = walk_tree_1 (tp, func, data, &pset, lh);
11365 return result;
11366 }
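
/* Illustrative sketch (not part of GCC): a minimal walk_tree_fn callback
   and a hypothetical helper, count_calls, showing how the walker above is
   typically driven through the walk_tree_without_duplicates macro.  The
   callback returns NULL_TREE to keep walking; returning any other tree
   stops the walk and propagates that value.  */

static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;
  return NULL_TREE;
}

static int
count_calls (tree body)
{
  int n = 0;
  walk_tree_without_duplicates (&body, count_calls_r, &n);
  return n;
}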
11367
11368
11369 tree
11370 tree_block (tree t)
11371 {
11372 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11373
11374 if (IS_EXPR_CODE_CLASS (c))
11375 return LOCATION_BLOCK (t->exp.locus);
11376 gcc_unreachable ();
11377 return NULL;
11378 }
11379
11380 void
11381 tree_set_block (tree t, tree b)
11382 {
11383 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11384
11385 if (IS_EXPR_CODE_CLASS (c))
11386 {
11387 if (b)
11388 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11389 else
11390 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11391 }
11392 else
11393 gcc_unreachable ();
11394 }
11395
11396 /* Create a nameless artificial label and put it in the current
11397 function context. The label has a location of LOC. Returns the
11398 newly created label. */
11399
11400 tree
11401 create_artificial_label (location_t loc)
11402 {
11403 tree lab = build_decl (loc,
11404 LABEL_DECL, NULL_TREE, void_type_node);
11405
11406 DECL_ARTIFICIAL (lab) = 1;
11407 DECL_IGNORED_P (lab) = 1;
11408 DECL_CONTEXT (lab) = current_function_decl;
11409 return lab;
11410 }
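
/* Illustrative sketch (not part of GCC): the usual pattern is to wrap the
   new LABEL_DECL in a LABEL_EXPR so it can be appended to a statement
   list; the helper name below is hypothetical.  */

static tree
example_label_expr (location_t loc)
{
  tree lab = create_artificial_label (loc);
  return build1 (LABEL_EXPR, void_type_node, lab);
}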
11411
11412 /* Given a tree, try to return a useful variable name that we can use
11413 to prefix a temporary that is being assigned the value of the tree.
11414 I.e., given <temp> = &A, return A. */
11415
11416 const char *
11417 get_name (tree t)
11418 {
11419 tree stripped_decl;
11420
11421 stripped_decl = t;
11422 STRIP_NOPS (stripped_decl);
11423 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11424 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11425 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11426 {
11427 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11428 if (!name)
11429 return NULL;
11430 return IDENTIFIER_POINTER (name);
11431 }
11432 else
11433 {
11434 switch (TREE_CODE (stripped_decl))
11435 {
11436 case ADDR_EXPR:
11437 return get_name (TREE_OPERAND (stripped_decl, 0));
11438 default:
11439 return NULL;
11440 }
11441 }
11442 }
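
/* Illustrative sketch (not part of GCC): get_name looks through an
   ADDR_EXPR, so for <temp> = &var it recovers "var".  The helper name is
   hypothetical; build_fold_addr_expr comes from fold-const.h.  */

static const char *
example_get_name (tree var)
{
  tree addr = build_fold_addr_expr (var);
  return get_name (addr);   /* IDENTIFIER string of VAR, or NULL.  */
}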
11443
11444 /* Return true if FNTYPE has a variable argument list. */
11445
11446 bool
11447 stdarg_p (const_tree fntype)
11448 {
11449 function_args_iterator args_iter;
11450 tree n = NULL_TREE, t;
11451
11452 if (!fntype)
11453 return false;
11454
11455 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11456 {
11457 n = t;
11458 }
11459
11460 return n != NULL_TREE && n != void_type_node;
11461 }
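
/* Illustrative sketch (not part of GCC): build_function_type_list
   terminates the argument list with void_type_node, so stdarg_p is false;
   build_varargs_function_type_list leaves the list open, so stdarg_p is
   true.  The function name below is hypothetical.  */

static void
example_stdarg_p (void)
{
  /* int f (int);  */
  tree fixed = build_function_type_list (integer_type_node,
                                         integer_type_node, NULL_TREE);
  /* int g (int, ...);  */
  tree variadic = build_varargs_function_type_list (integer_type_node,
                                                    integer_type_node,
                                                    NULL_TREE);
  gcc_assert (!stdarg_p (fixed));
  gcc_assert (stdarg_p (variadic));
}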
11462
11463 /* Return true if FNTYPE has a prototype. */
11464
11465 bool
11466 prototype_p (tree fntype)
11467 {
11468 tree t;
11469
11470 gcc_assert (fntype != NULL_TREE);
11471
11472 t = TYPE_ARG_TYPES (fntype);
11473 return (t != NULL_TREE);
11474 }
11475
11476 /* If BLOCK is inlined from an __attribute__((__artificial__))
11477 routine, return a pointer to the location from which it has
11478 been called. */
11479 location_t *
11480 block_nonartificial_location (tree block)
11481 {
11482 location_t *ret = NULL;
11483
11484 while (block && TREE_CODE (block) == BLOCK
11485 && BLOCK_ABSTRACT_ORIGIN (block))
11486 {
11487 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11488
11489 while (TREE_CODE (ao) == BLOCK
11490 && BLOCK_ABSTRACT_ORIGIN (ao)
11491 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11492 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11493
11494 if (TREE_CODE (ao) == FUNCTION_DECL)
11495 {
11496 /* If AO is an artificial inline, point RET to the
11497 call site locus at which it has been inlined and continue
11498 the loop, in case AO's caller is also an artificial
11499 inline. */
11500 if (DECL_DECLARED_INLINE_P (ao)
11501 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11502 ret = &BLOCK_SOURCE_LOCATION (block);
11503 else
11504 break;
11505 }
11506 else if (TREE_CODE (ao) != BLOCK)
11507 break;
11508
11509 block = BLOCK_SUPERCONTEXT (block);
11510 }
11511 return ret;
11512 }
11513
11514
11515 /* If EXP is inlined from an __attribute__((__artificial__))
11516 function, return the location of the original call expression. */
11517
11518 location_t
11519 tree_nonartificial_location (tree exp)
11520 {
11521 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11522
11523 if (loc)
11524 return *loc;
11525 else
11526 return EXPR_LOCATION (exp);
11527 }
11528
11529
11530 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11531 nodes. */
11532
11533 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11534
11535 hashval_t
11536 cl_option_hasher::hash (tree x)
11537 {
11538 const_tree const t = x;
11539 const char *p;
11540 size_t i;
11541 size_t len = 0;
11542 hashval_t hash = 0;
11543
11544 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11545 {
11546 p = (const char *)TREE_OPTIMIZATION (t);
11547 len = sizeof (struct cl_optimization);
11548 }
11549
11550 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11551 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11552
11553 else
11554 gcc_unreachable ();
11555
11556 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11557 something else. */
11558 for (i = 0; i < len; i++)
11559 if (p[i])
11560 hash = (hash << 4) ^ ((i << 2) | p[i]);
11561
11562 return hash;
11563 }
11564
11565 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11566 TARGET_OPTION_NODE tree node) is the same as that given by Y, a node
11567 of the same kind. */
11568
11569 bool
11570 cl_option_hasher::equal (tree x, tree y)
11571 {
11572 const_tree const xt = x;
11573 const_tree const yt = y;
11574 const char *xp;
11575 const char *yp;
11576 size_t len;
11577
11578 if (TREE_CODE (xt) != TREE_CODE (yt))
11579 return 0;
11580
11581 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11582 {
11583 xp = (const char *)TREE_OPTIMIZATION (xt);
11584 yp = (const char *)TREE_OPTIMIZATION (yt);
11585 len = sizeof (struct cl_optimization);
11586 }
11587
11588 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11589 {
11590 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11591 TREE_TARGET_OPTION (yt));
11592 }
11593
11594 else
11595 gcc_unreachable ();
11596
11597 return (memcmp (xp, yp, len) == 0);
11598 }
11599
11600 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11601
11602 tree
11603 build_optimization_node (struct gcc_options *opts)
11604 {
11605 tree t;
11606
11607 /* Use the cache of optimization nodes. */
11608
11609 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11610 opts);
11611
11612 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11613 t = *slot;
11614 if (!t)
11615 {
11616 /* Insert this one into the hash table. */
11617 t = cl_optimization_node;
11618 *slot = t;
11619
11620 /* Make a new node for next time round. */
11621 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11622 }
11623
11624 return t;
11625 }
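
/* Illustrative sketch (not part of GCC proper): roughly how per-function
   optimization settings get recorded, e.g. for attribute ((optimize));
   FNDECL is assumed to be a FUNCTION_DECL and the helper name is
   hypothetical.  Identical option sets share one OPTIMIZATION_NODE via the
   hash table above.  */

static void
example_attach_optimization (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
}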
11626
11627 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11628
11629 tree
11630 build_target_option_node (struct gcc_options *opts)
11631 {
11632 tree t;
11633
11634 /* Use the cache of target option nodes. */
11635
11636 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11637 opts);
11638
11639 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11640 t = *slot;
11641 if (!t)
11642 {
11643 /* Insert this one into the hash table. */
11644 t = cl_target_option_node;
11645 *slot = t;
11646
11647 /* Make a new node for next time round. */
11648 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11649 }
11650
11651 return t;
11652 }
11653
11654 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11655 so that they aren't saved during PCH writing. */
11656
11657 void
11658 prepare_target_option_nodes_for_pch (void)
11659 {
11660 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11661 for (; iter != cl_option_hash_table->end (); ++iter)
11662 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11663 TREE_TARGET_GLOBALS (*iter) = NULL;
11664 }
11665
11666 /* Determine the "ultimate origin" of a block. The block may be an inlined
11667 instance of an inlined instance of a block which is local to an inline
11668 function, so we have to trace all the way back through the origin chain
11669 to find out what sort of node actually served as the original seed for the
11670 given block. */
11671
11672 tree
11673 block_ultimate_origin (const_tree block)
11674 {
11675 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11676
11677 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11678 we're trying to output the abstract instance of this function. */
11679 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11680 return NULL_TREE;
11681
11682 if (immediate_origin == NULL_TREE)
11683 return NULL_TREE;
11684 else
11685 {
11686 tree ret_val;
11687 tree lookahead = immediate_origin;
11688
11689 do
11690 {
11691 ret_val = lookahead;
11692 lookahead = (TREE_CODE (ret_val) == BLOCK
11693 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11694 }
11695 while (lookahead != NULL && lookahead != ret_val);
11696
11697 /* The block's abstract origin chain may not be the *ultimate* origin of
11698 the block. It could lead to a DECL that has an abstract origin set.
11699 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11700 will give us if it has one). Note that DECL's abstract origins are
11701 supposed to be the most distant ancestor (or so decl_ultimate_origin
11702 claims), so we don't need to loop following the DECL origins. */
11703 if (DECL_P (ret_val))
11704 return DECL_ORIGIN (ret_val);
11705
11706 return ret_val;
11707 }
11708 }
11709
11710 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11711 no instruction. */
11712
11713 bool
11714 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11715 {
11716 /* Use precision rather than machine mode when we can, which gives
11717 the correct answer even for submode (bit-field) types. */
11718 if ((INTEGRAL_TYPE_P (outer_type)
11719 || POINTER_TYPE_P (outer_type)
11720 || TREE_CODE (outer_type) == OFFSET_TYPE)
11721 && (INTEGRAL_TYPE_P (inner_type)
11722 || POINTER_TYPE_P (inner_type)
11723 || TREE_CODE (inner_type) == OFFSET_TYPE))
11724 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11725
11726 /* Otherwise fall back on comparing machine modes (e.g. for
11727 aggregate types, floats). */
11728 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11729 }
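
/* Illustrative sketch (not part of GCC): conversions that only change
   signedness, or between pointer types of equal precision, are no-ops.
   The helper name is hypothetical and the pointer case assumes the
   default address space.  */

static void
example_nop_conversion (void)
{
  gcc_assert (tree_nop_conversion_p (unsigned_type_node, integer_type_node));
  gcc_assert (tree_nop_conversion_p (ptr_type_node,
                                     build_pointer_type (integer_type_node)));
}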
11730
11731 /* Return true iff conversion in EXP generates no instruction. Mark
11732 it inline so that we fully inline into the stripping functions even
11733 though we have two uses of this function. */
11734
11735 static inline bool
11736 tree_nop_conversion (const_tree exp)
11737 {
11738 tree outer_type, inner_type;
11739
11740 if (!CONVERT_EXPR_P (exp)
11741 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11742 return false;
11743 if (TREE_OPERAND (exp, 0) == error_mark_node)
11744 return false;
11745
11746 outer_type = TREE_TYPE (exp);
11747 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11748
11749 if (!inner_type)
11750 return false;
11751
11752 return tree_nop_conversion_p (outer_type, inner_type);
11753 }
11754
11755 /* Return true iff conversion in EXP generates no instruction. Don't
11756 consider conversions changing the signedness. */
11757
11758 static bool
11759 tree_sign_nop_conversion (const_tree exp)
11760 {
11761 tree outer_type, inner_type;
11762
11763 if (!tree_nop_conversion (exp))
11764 return false;
11765
11766 outer_type = TREE_TYPE (exp);
11767 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11768
11769 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11770 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11771 }
11772
11773 /* Strip conversions from EXP according to tree_nop_conversion and
11774 return the resulting expression. */
11775
11776 tree
11777 tree_strip_nop_conversions (tree exp)
11778 {
11779 while (tree_nop_conversion (exp))
11780 exp = TREE_OPERAND (exp, 0);
11781 return exp;
11782 }
11783
11784 /* Strip conversions from EXP according to tree_sign_nop_conversion
11785 and return the resulting expression. */
11786
11787 tree
11788 tree_strip_sign_nop_conversions (tree exp)
11789 {
11790 while (tree_sign_nop_conversion (exp))
11791 exp = TREE_OPERAND (exp, 0);
11792 return exp;
11793 }
11794
11795 /* Avoid any floating point extensions from EXP. */
11796 tree
11797 strip_float_extensions (tree exp)
11798 {
11799 tree sub, expt, subt;
11800
11801 /* For a floating point constant, look up the narrowest type that can
11802 hold it properly and handle it like (type)(narrowest_type)constant.
11803 This way we can optimize, for instance, a=a*2.0 where "a" is float
11804 but 2.0 is a double constant. */
11805 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11806 {
11807 REAL_VALUE_TYPE orig;
11808 tree type = NULL;
11809
11810 orig = TREE_REAL_CST (exp);
11811 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11812 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11813 type = float_type_node;
11814 else if (TYPE_PRECISION (TREE_TYPE (exp))
11815 > TYPE_PRECISION (double_type_node)
11816 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11817 type = double_type_node;
11818 if (type)
11819 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11820 }
11821
11822 if (!CONVERT_EXPR_P (exp))
11823 return exp;
11824
11825 sub = TREE_OPERAND (exp, 0);
11826 subt = TREE_TYPE (sub);
11827 expt = TREE_TYPE (exp);
11828
11829 if (!FLOAT_TYPE_P (subt))
11830 return exp;
11831
11832 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11833 return exp;
11834
11835 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11836 return exp;
11837
11838 return strip_float_extensions (sub);
11839 }
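
/* Illustrative sketch (not part of GCC): the double constant 2.0 truncates
   exactly to float, so strip_float_extensions hands back a REAL_CST of
   float_type_node.  The helper name is hypothetical.  */

static tree
example_narrow_two (void)
{
  tree two = build_real (double_type_node, dconst2);
  tree narrowed = strip_float_extensions (two);
  gcc_assert (TREE_TYPE (narrowed) == float_type_node);
  return narrowed;
}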
11840
11841 /* Strip out all handled components that produce invariant
11842 offsets. */
11843
11844 const_tree
11845 strip_invariant_refs (const_tree op)
11846 {
11847 while (handled_component_p (op))
11848 {
11849 switch (TREE_CODE (op))
11850 {
11851 case ARRAY_REF:
11852 case ARRAY_RANGE_REF:
11853 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11854 || TREE_OPERAND (op, 2) != NULL_TREE
11855 || TREE_OPERAND (op, 3) != NULL_TREE)
11856 return NULL;
11857 break;
11858
11859 case COMPONENT_REF:
11860 if (TREE_OPERAND (op, 2) != NULL_TREE)
11861 return NULL;
11862 break;
11863
11864 default:;
11865 }
11866 op = TREE_OPERAND (op, 0);
11867 }
11868
11869 return op;
11870 }
11871
11872 static GTY(()) tree gcc_eh_personality_decl;
11873
11874 /* Return the GCC personality function decl. */
11875
11876 tree
11877 lhd_gcc_personality (void)
11878 {
11879 if (!gcc_eh_personality_decl)
11880 gcc_eh_personality_decl = build_personality_function ("gcc");
11881 return gcc_eh_personality_decl;
11882 }
11883
11884 /* TARGET is the call target of a GIMPLE call statement
11885 (obtained by gimple_call_fn). Return true if it is an
11886 OBJ_TYPE_REF representing a virtual call to a C++ method.
11887 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11888 through a cast, where the middle-end devirtualization machinery
11889 can't apply.) */
11890
11891 bool
11892 virtual_method_call_p (tree target)
11893 {
11894 if (TREE_CODE (target) != OBJ_TYPE_REF)
11895 return false;
11896 tree t = TREE_TYPE (target);
11897 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11898 t = TREE_TYPE (t);
11899 if (TREE_CODE (t) == FUNCTION_TYPE)
11900 return false;
11901 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11902 /* If we do not have BINFO associated, it means that type was built
11903 without devirtualization enabled. Do not consider this a virtual
11904 call. */
11905 if (!TYPE_BINFO (obj_type_ref_class (target)))
11906 return false;
11907 return true;
11908 }
11909
11910 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11911
11912 tree
11913 obj_type_ref_class (tree ref)
11914 {
11915 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11916 ref = TREE_TYPE (ref);
11917 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11918 ref = TREE_TYPE (ref);
11919 /* We look for the type that THIS points to. ObjC also builds
11920 OBJ_TYPE_REF for non-method calls; their first parameter
11921 ID, however, also corresponds to the class type. */
11922 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11923 || TREE_CODE (ref) == FUNCTION_TYPE);
11924 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11925 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11926 return TREE_TYPE (ref);
11927 }
11928
11929 /* Return true if T is in an anonymous namespace. */
11930
11931 bool
11932 type_in_anonymous_namespace_p (const_tree t)
11933 {
11934 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11935 builtin types; those have a NULL TYPE_CONTEXT. */
11936 if (!TYPE_CONTEXT (t))
11937 return false;
11938 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11939 }
11940
11941 /* Try to find a base info of BINFO that would have its field decl at offset
11942 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11943 found, return it, otherwise return NULL_TREE. */
11944
11945 tree
11946 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11947 {
11948 tree type = BINFO_TYPE (binfo);
11949
11950 while (true)
11951 {
11952 HOST_WIDE_INT pos, size;
11953 tree fld;
11954 int i;
11955
11956 if (types_same_for_odr (type, expected_type))
11957 return binfo;
11958 if (offset < 0)
11959 return NULL_TREE;
11960
11961 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11962 {
11963 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
11964 continue;
11965
11966 pos = int_bit_position (fld);
11967 size = tree_to_uhwi (DECL_SIZE (fld));
11968 if (pos <= offset && (pos + size) > offset)
11969 break;
11970 }
11971 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11972 return NULL_TREE;
11973
11974 /* Offset 0 indicates the primary base, whose vtable contents are
11975 represented in the binfo for the derived class. */
11976 else if (offset != 0)
11977 {
11978 tree base_binfo, binfo2 = binfo;
11979
11980 /* Find the BINFO corresponding to FLD. This is made a bit harder
11981 by the fact that with virtual inheritance we may need to walk down
11982 the non-virtual inheritance chain. */
11983 while (true)
11984 {
11985 tree containing_binfo = NULL, found_binfo = NULL;
11986 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11987 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11988 {
11989 found_binfo = base_binfo;
11990 break;
11991 }
11992 else
11993 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11994 - tree_to_shwi (BINFO_OFFSET (binfo)))
11995 * BITS_PER_UNIT < pos
11996 /* Rule out types with no virtual methods or we can get confused
11997 here by zero sized bases. */
11998 && TYPE_BINFO (BINFO_TYPE (base_binfo))
11999 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
12000 && (!containing_binfo
12001 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
12002 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
12003 containing_binfo = base_binfo;
12004 if (found_binfo)
12005 {
12006 binfo = found_binfo;
12007 break;
12008 }
12009 if (!containing_binfo)
12010 return NULL_TREE;
12011 binfo2 = containing_binfo;
12012 }
12013 }
12014
12015 type = TREE_TYPE (fld);
12016 offset -= pos;
12017 }
12018 }
12019
12020 /* Returns true if X is a typedef decl. */
12021
12022 bool
12023 is_typedef_decl (tree x)
12024 {
12025 return (x && TREE_CODE (x) == TYPE_DECL
12026 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12027 }
12028
12029 /* Returns true iff TYPE is a type variant created for a typedef. */
12030
12031 bool
12032 typedef_variant_p (tree type)
12033 {
12034 return is_typedef_decl (TYPE_NAME (type));
12035 }
12036
12037 /* Warn about a use of an identifier which was marked deprecated. */
12038 void
12039 warn_deprecated_use (tree node, tree attr)
12040 {
12041 const char *msg;
12042
12043 if (node == 0 || !warn_deprecated_decl)
12044 return;
12045
12046 if (!attr)
12047 {
12048 if (DECL_P (node))
12049 attr = DECL_ATTRIBUTES (node);
12050 else if (TYPE_P (node))
12051 {
12052 tree decl = TYPE_STUB_DECL (node);
12053 if (decl)
12054 attr = lookup_attribute ("deprecated",
12055 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12056 }
12057 }
12058
12059 if (attr)
12060 attr = lookup_attribute ("deprecated", attr);
12061
12062 if (attr)
12063 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12064 else
12065 msg = NULL;
12066
12067 bool w;
12068 if (DECL_P (node))
12069 {
12070 if (msg)
12071 w = warning (OPT_Wdeprecated_declarations,
12072 "%qD is deprecated: %s", node, msg);
12073 else
12074 w = warning (OPT_Wdeprecated_declarations,
12075 "%qD is deprecated", node);
12076 if (w)
12077 inform (DECL_SOURCE_LOCATION (node), "declared here");
12078 }
12079 else if (TYPE_P (node))
12080 {
12081 tree what = NULL_TREE;
12082 tree decl = TYPE_STUB_DECL (node);
12083
12084 if (TYPE_NAME (node))
12085 {
12086 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12087 what = TYPE_NAME (node);
12088 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12089 && DECL_NAME (TYPE_NAME (node)))
12090 what = DECL_NAME (TYPE_NAME (node));
12091 }
12092
12093 if (decl)
12094 {
12095 if (what)
12096 {
12097 if (msg)
12098 w = warning (OPT_Wdeprecated_declarations,
12099 "%qE is deprecated: %s", what, msg);
12100 else
12101 w = warning (OPT_Wdeprecated_declarations,
12102 "%qE is deprecated", what);
12103 }
12104 else
12105 {
12106 if (msg)
12107 w = warning (OPT_Wdeprecated_declarations,
12108 "type is deprecated: %s", msg);
12109 else
12110 w = warning (OPT_Wdeprecated_declarations,
12111 "type is deprecated");
12112 }
12113 if (w)
12114 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12115 }
12116 else
12117 {
12118 if (what)
12119 {
12120 if (msg)
12121 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12122 what, msg);
12123 else
12124 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12125 }
12126 else
12127 {
12128 if (msg)
12129 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12130 msg);
12131 else
12132 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12133 }
12134 }
12135 }
12136 }
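
/* Illustrative sketch (not part of GCC): front ends call this when a
   reference to a declaration with TREE_DEPRECATED set is seen; passing
   NULL_TREE for ATTR makes the function look the attribute up itself.
   For a user declaration such as
       int old_api (void) __attribute__ ((deprecated ("use new_api")));
   this emits "'old_api' is deprecated: use new_api".  The helper name is
   hypothetical.  */

static void
example_warn_if_deprecated (tree use_decl)
{
  if (DECL_P (use_decl) && TREE_DEPRECATED (use_decl))
    warn_deprecated_use (use_decl, NULL_TREE);
}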
12137
12138 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12139 somewhere in it. */
12140
12141 bool
12142 contains_bitfld_component_ref_p (const_tree ref)
12143 {
12144 while (handled_component_p (ref))
12145 {
12146 if (TREE_CODE (ref) == COMPONENT_REF
12147 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12148 return true;
12149 ref = TREE_OPERAND (ref, 0);
12150 }
12151
12152 return false;
12153 }
12154
12155 /* Try to determine whether a TRY_CATCH expression can fall through.
12156 This is a subroutine of block_may_fallthru. */
12157
12158 static bool
12159 try_catch_may_fallthru (const_tree stmt)
12160 {
12161 tree_stmt_iterator i;
12162
12163 /* If the TRY block can fall through, the whole TRY_CATCH can
12164 fall through. */
12165 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12166 return true;
12167
12168 i = tsi_start (TREE_OPERAND (stmt, 1));
12169 switch (TREE_CODE (tsi_stmt (i)))
12170 {
12171 case CATCH_EXPR:
12172 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12173 catch expression and a body. The whole TRY_CATCH may fall
12174 through iff any of the catch bodies falls through. */
12175 for (; !tsi_end_p (i); tsi_next (&i))
12176 {
12177 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12178 return true;
12179 }
12180 return false;
12181
12182 case EH_FILTER_EXPR:
12183 /* The exception filter expression only matters if there is an
12184 exception. If the exception does not match EH_FILTER_TYPES,
12185 we will execute EH_FILTER_FAILURE, and we will fall through
12186 if that falls through. If the exception does match
12187 EH_FILTER_TYPES, the stack unwinder will continue up the
12188 stack, so we will not fall through. We don't know whether we
12189 will throw an exception which matches EH_FILTER_TYPES or not,
12190 so we just ignore EH_FILTER_TYPES and assume that we might
12191 throw an exception which doesn't match. */
12192 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12193
12194 default:
12195 /* This case represents statements to be executed when an
12196 exception occurs. Those statements are implicitly followed
12197 by a RESX statement to resume execution after the exception.
12198 So in this case the TRY_CATCH never falls through. */
12199 return false;
12200 }
12201 }
12202
12203 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12204 need not be 100% accurate; simply be conservative and return true if we
12205 don't know. This is used only to avoid stupidly generating extra code.
12206 If we're wrong, we'll just delete the extra code later. */
12207
12208 bool
12209 block_may_fallthru (const_tree block)
12210 {
12211 /* This CONST_CAST is okay because expr_last returns its argument
12212 unmodified and we assign it to a const_tree. */
12213 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12214
12215 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12216 {
12217 case GOTO_EXPR:
12218 case RETURN_EXPR:
12219 /* Easy cases. If the last statement of the block implies
12220 control transfer, then we can't fall through. */
12221 return false;
12222
12223 case SWITCH_EXPR:
12224 /* If SWITCH_LABELS is set, this is lowered, and represents a
12225 branch to a selected label and hence can not fall through.
12226 Otherwise SWITCH_BODY is set, and the switch can fall
12227 through. */
12228 return SWITCH_LABELS (stmt) == NULL_TREE;
12229
12230 case COND_EXPR:
12231 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12232 return true;
12233 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12234
12235 case BIND_EXPR:
12236 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12237
12238 case TRY_CATCH_EXPR:
12239 return try_catch_may_fallthru (stmt);
12240
12241 case TRY_FINALLY_EXPR:
12242 /* The finally clause is always executed after the try clause,
12243 so if it does not fall through, then the try-finally will not
12244 fall through. Otherwise, if the try clause does not fall
12245 through, then when the finally clause falls through it will
12246 resume execution wherever the try clause was going. So the
12247 whole try-finally will only fall through if both the try
12248 clause and the finally clause fall through. */
12249 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12250 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12251
12252 case MODIFY_EXPR:
12253 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12254 stmt = TREE_OPERAND (stmt, 1);
12255 else
12256 return true;
12257 /* FALLTHRU */
12258
12259 case CALL_EXPR:
12260 /* Functions that do not return do not fall through. */
12261 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12262
12263 case CLEANUP_POINT_EXPR:
12264 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12265
12266 case TARGET_EXPR:
12267 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12268
12269 case ERROR_MARK:
12270 return true;
12271
12272 default:
12273 return lang_hooks.block_may_fallthru (stmt);
12274 }
12275 }
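
/* Illustrative sketch (not part of GCC): a block whose last statement is a
   bare RETURN_EXPR transfers control and therefore cannot fall through.
   The helper name is hypothetical.  */

static void
example_fallthru (void)
{
  tree ret = build1 (RETURN_EXPR, void_type_node, NULL_TREE);
  gcc_assert (!block_may_fallthru (ret));
}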
12276
12277 /* True if we are using EH to handle cleanups. */
12278 static bool using_eh_for_cleanups_flag = false;
12279
12280 /* This routine is called from front ends to indicate eh should be used for
12281 cleanups. */
12282 void
12283 using_eh_for_cleanups (void)
12284 {
12285 using_eh_for_cleanups_flag = true;
12286 }
12287
12288 /* Query whether EH is used for cleanups. */
12289 bool
12290 using_eh_for_cleanups_p (void)
12291 {
12292 return using_eh_for_cleanups_flag;
12293 }
12294
12295 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12296 const char *
12297 get_tree_code_name (enum tree_code code)
12298 {
12299 const char *invalid = "<invalid tree code>";
12300
12301 if (code >= MAX_TREE_CODES)
12302 return invalid;
12303
12304 return tree_code_name[code];
12305 }
12306
12307 /* Drops the TREE_OVERFLOW flag from T. */
12308
12309 tree
12310 drop_tree_overflow (tree t)
12311 {
12312 gcc_checking_assert (TREE_OVERFLOW (t));
12313
12314 /* For tree codes with a sharing machinery re-build the result. */
12315 if (TREE_CODE (t) == INTEGER_CST)
12316 return wide_int_to_tree (TREE_TYPE (t), t);
12317
12318 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12319 and drop the flag. */
12320 t = copy_node (t);
12321 TREE_OVERFLOW (t) = 0;
12322 return t;
12323 }
12324
12325 /* Given a memory reference expression T, return its base address.
12326 The base address of a memory reference expression is the main
12327 object being referenced. For instance, the base address for
12328 'array[i].fld[j]' is 'array'. You can think of this as stripping
12329 away the offset part from a memory address.
12330
12331 This function calls handled_component_p to strip away all the inner
12332 parts of the memory reference until it reaches the base object. */
12333
12334 tree
12335 get_base_address (tree t)
12336 {
12337 while (handled_component_p (t))
12338 t = TREE_OPERAND (t, 0);
12339
12340 if ((TREE_CODE (t) == MEM_REF
12341 || TREE_CODE (t) == TARGET_MEM_REF)
12342 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12343 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12344
12345 /* ??? Either the alias oracle or all callers need to properly deal
12346 with WITH_SIZE_EXPRs before we can look through those. */
12347 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12348 return NULL_TREE;
12349
12350 return t;
12351 }
12352
12353 /* Return the machine mode of T. For vectors, returns the mode of the
12354 inner type. The main use case is to feed the result to HONOR_NANS,
12355 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12356
12357 machine_mode
12358 element_mode (const_tree t)
12359 {
12360 if (!TYPE_P (t))
12361 t = TREE_TYPE (t);
12362 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12363 t = TREE_TYPE (t);
12364 return TYPE_MODE (t);
12365 }
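
/* Illustrative sketch (not part of GCC): for a complex type, element_mode
   reports the mode of the component type rather than the complex mode, so
   HONOR_NANS can be queried directly.  The helper name is hypothetical.  */

static void
example_element_mode (void)
{
  gcc_assert (element_mode (complex_double_type_node)
              == TYPE_MODE (double_type_node));
}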
12366
12367 #include "gt-tree.h"