1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in c-typeck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "filenames.h"
46 #include "output.h"
47 #include "target.h"
48 #include "common/common-target.h"
49 #include "langhooks.h"
50 #include "tree-inline.h"
51 #include "tree-iterator.h"
52 #include "basic-block.h"
53 #include "bitmap.h"
54 #include "pointer-set.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static unsigned int type_hash_list (const_tree, hashval_t);
234 static unsigned int attribute_hash_list (const_tree, hashval_t);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 };
285
286 const char * const omp_clause_code_name[] =
287 {
288 "error_clause",
289 "private",
290 "shared",
291 "firstprivate",
292 "lastprivate",
293 "reduction",
294 "copyin",
295 "copyprivate",
296 "linear",
297 "aligned",
298 "depend",
299 "uniform",
300 "from",
301 "to",
302 "map",
303 "_looptemp_",
304 "if",
305 "num_threads",
306 "schedule",
307 "nowait",
308 "ordered",
309 "default",
310 "collapse",
311 "untied",
312 "final",
313 "mergeable",
314 "device",
315 "dist_schedule",
316 "inbranch",
317 "notinbranch",
318 "num_teams",
319 "thread_limit",
320 "proc_bind",
321 "safelen",
322 "simdlen",
323 "for",
324 "parallel",
325 "sections",
326 "taskgroup",
327 "_simduid_"
328 };
329
330
331 /* Return the tree node structure used by tree code CODE. */
332
333 static inline enum tree_node_structure_enum
334 tree_node_structure_for_code (enum tree_code code)
335 {
336 switch (TREE_CODE_CLASS (code))
337 {
338 case tcc_declaration:
339 {
340 switch (code)
341 {
342 case FIELD_DECL:
343 return TS_FIELD_DECL;
344 case PARM_DECL:
345 return TS_PARM_DECL;
346 case VAR_DECL:
347 return TS_VAR_DECL;
348 case LABEL_DECL:
349 return TS_LABEL_DECL;
350 case RESULT_DECL:
351 return TS_RESULT_DECL;
352 case DEBUG_EXPR_DECL:
353 return TS_DECL_WRTL;
354 case CONST_DECL:
355 return TS_CONST_DECL;
356 case TYPE_DECL:
357 return TS_TYPE_DECL;
358 case FUNCTION_DECL:
359 return TS_FUNCTION_DECL;
360 case TRANSLATION_UNIT_DECL:
361 return TS_TRANSLATION_UNIT_DECL;
362 default:
363 return TS_DECL_NON_COMMON;
364 }
365 }
366 case tcc_type:
367 return TS_TYPE_NON_COMMON;
368 case tcc_reference:
369 case tcc_comparison:
370 case tcc_unary:
371 case tcc_binary:
372 case tcc_expression:
373 case tcc_statement:
374 case tcc_vl_exp:
375 return TS_EXP;
376 default: /* tcc_constant and tcc_exceptional */
377 break;
378 }
379 switch (code)
380 {
381 /* tcc_constant cases. */
382 case VOID_CST: return TS_TYPED;
383 case INTEGER_CST: return TS_INT_CST;
384 case REAL_CST: return TS_REAL_CST;
385 case FIXED_CST: return TS_FIXED_CST;
386 case COMPLEX_CST: return TS_COMPLEX;
387 case VECTOR_CST: return TS_VECTOR;
388 case STRING_CST: return TS_STRING;
389 /* tcc_exceptional cases. */
390 case ERROR_MARK: return TS_COMMON;
391 case IDENTIFIER_NODE: return TS_IDENTIFIER;
392 case TREE_LIST: return TS_LIST;
393 case TREE_VEC: return TS_VEC;
394 case SSA_NAME: return TS_SSA_NAME;
395 case PLACEHOLDER_EXPR: return TS_COMMON;
396 case STATEMENT_LIST: return TS_STATEMENT_LIST;
397 case BLOCK: return TS_BLOCK;
398 case CONSTRUCTOR: return TS_CONSTRUCTOR;
399 case TREE_BINFO: return TS_BINFO;
400 case OMP_CLAUSE: return TS_OMP_CLAUSE;
401 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
402 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
403
404 default:
405 gcc_unreachable ();
406 }
407 }
408
409
410 /* Initialize tree_contains_struct to describe the hierarchy of tree
411 nodes. */
412
413 static void
414 initialize_tree_contains_struct (void)
415 {
416 unsigned i;
417
418 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
419 {
420 enum tree_code code;
421 enum tree_node_structure_enum ts_code;
422
423 code = (enum tree_code) i;
424 ts_code = tree_node_structure_for_code (code);
425
426 /* Mark the TS structure itself. */
427 tree_contains_struct[code][ts_code] = 1;
428
429 /* Mark all the structures that TS is derived from. */
430 switch (ts_code)
431 {
432 case TS_TYPED:
433 case TS_BLOCK:
434 MARK_TS_BASE (code);
435 break;
436
437 case TS_COMMON:
438 case TS_INT_CST:
439 case TS_REAL_CST:
440 case TS_FIXED_CST:
441 case TS_VECTOR:
442 case TS_STRING:
443 case TS_COMPLEX:
444 case TS_SSA_NAME:
445 case TS_CONSTRUCTOR:
446 case TS_EXP:
447 case TS_STATEMENT_LIST:
448 MARK_TS_TYPED (code);
449 break;
450
451 case TS_IDENTIFIER:
452 case TS_DECL_MINIMAL:
453 case TS_TYPE_COMMON:
454 case TS_LIST:
455 case TS_VEC:
456 case TS_BINFO:
457 case TS_OMP_CLAUSE:
458 case TS_OPTIMIZATION:
459 case TS_TARGET_OPTION:
460 MARK_TS_COMMON (code);
461 break;
462
463 case TS_TYPE_WITH_LANG_SPECIFIC:
464 MARK_TS_TYPE_COMMON (code);
465 break;
466
467 case TS_TYPE_NON_COMMON:
468 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
469 break;
470
471 case TS_DECL_COMMON:
472 MARK_TS_DECL_MINIMAL (code);
473 break;
474
475 case TS_DECL_WRTL:
476 case TS_CONST_DECL:
477 MARK_TS_DECL_COMMON (code);
478 break;
479
480 case TS_DECL_NON_COMMON:
481 MARK_TS_DECL_WITH_VIS (code);
482 break;
483
484 case TS_DECL_WITH_VIS:
485 case TS_PARM_DECL:
486 case TS_LABEL_DECL:
487 case TS_RESULT_DECL:
488 MARK_TS_DECL_WRTL (code);
489 break;
490
491 case TS_FIELD_DECL:
492 MARK_TS_DECL_COMMON (code);
493 break;
494
495 case TS_VAR_DECL:
496 MARK_TS_DECL_WITH_VIS (code);
497 break;
498
499 case TS_TYPE_DECL:
500 case TS_FUNCTION_DECL:
501 MARK_TS_DECL_NON_COMMON (code);
502 break;
503
504 case TS_TRANSLATION_UNIT_DECL:
505 MARK_TS_DECL_COMMON (code);
506 break;
507
508 default:
509 gcc_unreachable ();
510 }
511 }
512
513 /* Basic consistency checks for attributes used in fold. */
514 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
515 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
526 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
540 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
543 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
544 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
546 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
547 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
548 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
549 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
550 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
552 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
554 }
555
556
557 /* Init tree.c. */
558
559 void
560 init_ttree (void)
561 {
562 /* Initialize the hash table of types. */
563 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
564 type_hash_eq, 0);
565
566 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
567 tree_decl_map_eq, 0);
568
569 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
570 tree_decl_map_eq, 0);
571
572 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
573 int_cst_hash_eq, NULL);
574
575 int_cst_node = make_int_cst (1, 1);
576
577 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
578 cl_option_hash_eq, NULL);
579
580 cl_optimization_node = make_node (OPTIMIZATION_NODE);
581 cl_target_option_node = make_node (TARGET_OPTION_NODE);
582
583 /* Initialize the tree_contains_struct array. */
584 initialize_tree_contains_struct ();
585 lang_hooks.init_ts ();
586 }
587
588 \f
589 /* The name of the object as the assembler will see it (but before any
590 translations made by ASM_OUTPUT_LABELREF). Often this is the same
591 as DECL_NAME. It is an IDENTIFIER_NODE. */
592 tree
593 decl_assembler_name (tree decl)
594 {
595 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
596 lang_hooks.set_decl_assembler_name (decl);
597 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
598 }
599
600 /* When the target supports COMDAT groups, this indicates which group the
601 DECL is associated with. This can be either an IDENTIFIER_NODE or a
602 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
603 tree
604 decl_comdat_group (const_tree node)
605 {
606 struct symtab_node *snode = symtab_get_node (node);
607 if (!snode)
608 return NULL;
609 return snode->get_comdat_group ();
610 }
611
612 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
613 tree
614 decl_comdat_group_id (const_tree node)
615 {
616 struct symtab_node *snode = symtab_get_node (node);
617 if (!snode)
618 return NULL;
619 return snode->get_comdat_group_id ();
620 }
621
622 /* When the target supports named sections, return the name of NODE's
623 section as a string, or NULL if it is in no section. */
624 const char *
625 decl_section_name (const_tree node)
626 {
627 struct symtab_node *snode = symtab_get_node (node);
628 if (!snode)
629 return NULL;
630 return snode->get_section ();
631 }
632
633 /* Set the section name of NODE to VALUE (a string, or NULL to remove
634 the section). */
635 void
636 set_decl_section_name (tree node, const char *value)
637 {
638 struct symtab_node *snode;
639
640 if (value == NULL)
641 {
642 snode = symtab_get_node (node);
643 if (!snode)
644 return;
645 }
646 else if (TREE_CODE (node) == VAR_DECL)
647 snode = varpool_node_for_decl (node);
648 else
649 snode = cgraph_get_create_node (node);
650 snode->set_section (value);
651 }
652
653 /* Return TLS model of a variable NODE. */
654 enum tls_model
655 decl_tls_model (const_tree node)
656 {
657 struct varpool_node *snode = varpool_get_node (node);
658 if (!snode)
659 return TLS_MODEL_NONE;
660 return snode->tls_model;
661 }
662
663 /* Set TLS model of variable NODE to MODEL. */
664 void
665 set_decl_tls_model (tree node, enum tls_model model)
666 {
667 struct varpool_node *vnode;
668
669 if (model == TLS_MODEL_NONE)
670 {
671 vnode = varpool_get_node (node);
672 if (!vnode)
673 return;
674 }
675 else
676 vnode = varpool_node_for_decl (node);
677 vnode->tls_model = model;
678 }
679
680 /* Compute the number of bytes occupied by a tree with code CODE.
681 This function cannot be used for nodes that have variable sizes,
682 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
683 size_t
684 tree_code_size (enum tree_code code)
685 {
686 switch (TREE_CODE_CLASS (code))
687 {
688 case tcc_declaration: /* A decl node */
689 {
690 switch (code)
691 {
692 case FIELD_DECL:
693 return sizeof (struct tree_field_decl);
694 case PARM_DECL:
695 return sizeof (struct tree_parm_decl);
696 case VAR_DECL:
697 return sizeof (struct tree_var_decl);
698 case LABEL_DECL:
699 return sizeof (struct tree_label_decl);
700 case RESULT_DECL:
701 return sizeof (struct tree_result_decl);
702 case CONST_DECL:
703 return sizeof (struct tree_const_decl);
704 case TYPE_DECL:
705 return sizeof (struct tree_type_decl);
706 case FUNCTION_DECL:
707 return sizeof (struct tree_function_decl);
708 case DEBUG_EXPR_DECL:
709 return sizeof (struct tree_decl_with_rtl);
710 default:
711 return sizeof (struct tree_decl_non_common);
712 }
713 }
714
715 case tcc_type: /* a type node */
716 return sizeof (struct tree_type_non_common);
717
718 case tcc_reference: /* a reference */
719 case tcc_expression: /* an expression */
720 case tcc_statement: /* an expression with side effects */
721 case tcc_comparison: /* a comparison expression */
722 case tcc_unary: /* a unary arithmetic expression */
723 case tcc_binary: /* a binary arithmetic expression */
724 return (sizeof (struct tree_exp)
725 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
726
727 case tcc_constant: /* a constant */
728 switch (code)
729 {
730 case VOID_CST: return sizeof (struct tree_typed);
731 case INTEGER_CST: gcc_unreachable ();
732 case REAL_CST: return sizeof (struct tree_real_cst);
733 case FIXED_CST: return sizeof (struct tree_fixed_cst);
734 case COMPLEX_CST: return sizeof (struct tree_complex);
735 case VECTOR_CST: return sizeof (struct tree_vector);
736 case STRING_CST: gcc_unreachable ();
737 default:
738 return lang_hooks.tree_size (code);
739 }
740
741 case tcc_exceptional: /* something random, like an identifier. */
742 switch (code)
743 {
744 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
745 case TREE_LIST: return sizeof (struct tree_list);
746
747 case ERROR_MARK:
748 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
749
750 case TREE_VEC:
751 case OMP_CLAUSE: gcc_unreachable ();
752
753 case SSA_NAME: return sizeof (struct tree_ssa_name);
754
755 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
756 case BLOCK: return sizeof (struct tree_block);
757 case CONSTRUCTOR: return sizeof (struct tree_constructor);
758 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
759 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
760
761 default:
762 return lang_hooks.tree_size (code);
763 }
764
765 default:
766 gcc_unreachable ();
767 }
768 }
769
770 /* Compute the number of bytes occupied by NODE. This routine only
771 looks at TREE_CODE, except for those nodes that have variable sizes. */
772 size_t
773 tree_size (const_tree node)
774 {
775 const enum tree_code code = TREE_CODE (node);
776 switch (code)
777 {
778 case INTEGER_CST:
779 return (sizeof (struct tree_int_cst)
780 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
781
782 case TREE_BINFO:
783 return (offsetof (struct tree_binfo, base_binfos)
784 + vec<tree, va_gc>
785 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
786
787 case TREE_VEC:
788 return (sizeof (struct tree_vec)
789 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
790
791 case VECTOR_CST:
792 return (sizeof (struct tree_vector)
793 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
794
795 case STRING_CST:
796 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
797
798 case OMP_CLAUSE:
799 return (sizeof (struct tree_omp_clause)
800 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
801 * sizeof (tree));
802
803 default:
804 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
805 return (sizeof (struct tree_exp)
806 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
807 else
808 return tree_code_size (code);
809 }
810 }
811
812 /* Record interesting allocation statistics for a tree node with CODE
813 and LENGTH. */
814
815 static void
816 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
817 size_t length ATTRIBUTE_UNUSED)
818 {
819 enum tree_code_class type = TREE_CODE_CLASS (code);
820 tree_node_kind kind;
821
822 if (!GATHER_STATISTICS)
823 return;
824
825 switch (type)
826 {
827 case tcc_declaration: /* A decl node */
828 kind = d_kind;
829 break;
830
831 case tcc_type: /* a type node */
832 kind = t_kind;
833 break;
834
835 case tcc_statement: /* an expression with side effects */
836 kind = s_kind;
837 break;
838
839 case tcc_reference: /* a reference */
840 kind = r_kind;
841 break;
842
843 case tcc_expression: /* an expression */
844 case tcc_comparison: /* a comparison expression */
845 case tcc_unary: /* a unary arithmetic expression */
846 case tcc_binary: /* a binary arithmetic expression */
847 kind = e_kind;
848 break;
849
850 case tcc_constant: /* a constant */
851 kind = c_kind;
852 break;
853
854 case tcc_exceptional: /* something random, like an identifier. */
855 switch (code)
856 {
857 case IDENTIFIER_NODE:
858 kind = id_kind;
859 break;
860
861 case TREE_VEC:
862 kind = vec_kind;
863 break;
864
865 case TREE_BINFO:
866 kind = binfo_kind;
867 break;
868
869 case SSA_NAME:
870 kind = ssa_name_kind;
871 break;
872
873 case BLOCK:
874 kind = b_kind;
875 break;
876
877 case CONSTRUCTOR:
878 kind = constr_kind;
879 break;
880
881 case OMP_CLAUSE:
882 kind = omp_clause_kind;
883 break;
884
885 default:
886 kind = x_kind;
887 break;
888 }
889 break;
890
891 case tcc_vl_exp:
892 kind = e_kind;
893 break;
894
895 default:
896 gcc_unreachable ();
897 }
898
899 tree_code_counts[(int) code]++;
900 tree_node_counts[(int) kind]++;
901 tree_node_sizes[(int) kind] += length;
902 }
903
904 /* Allocate and return a new UID from the DECL_UID namespace. */
905
906 int
907 allocate_decl_uid (void)
908 {
909 return next_decl_uid++;
910 }
911
912 /* Return a newly allocated node of code CODE. For decl and type
913 nodes, some other fields are initialized. The rest of the node is
914 initialized to zero. This function cannot be used for TREE_VEC,
915 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
916 tree_code_size.
917
918 Achoo! I got a code in the node. */
919
920 tree
921 make_node_stat (enum tree_code code MEM_STAT_DECL)
922 {
923 tree t;
924 enum tree_code_class type = TREE_CODE_CLASS (code);
925 size_t length = tree_code_size (code);
926
927 record_node_allocation_statistics (code, length);
928
929 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
930 TREE_SET_CODE (t, code);
931
932 switch (type)
933 {
934 case tcc_statement:
935 TREE_SIDE_EFFECTS (t) = 1;
936 break;
937
938 case tcc_declaration:
939 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
940 {
941 if (code == FUNCTION_DECL)
942 {
943 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
944 DECL_MODE (t) = FUNCTION_MODE;
945 }
946 else
947 DECL_ALIGN (t) = 1;
948 }
949 DECL_SOURCE_LOCATION (t) = input_location;
950 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
951 DECL_UID (t) = --next_debug_decl_uid;
952 else
953 {
954 DECL_UID (t) = allocate_decl_uid ();
955 SET_DECL_PT_UID (t, -1);
956 }
957 if (TREE_CODE (t) == LABEL_DECL)
958 LABEL_DECL_UID (t) = -1;
959
960 break;
961
962 case tcc_type:
963 TYPE_UID (t) = next_type_uid++;
964 TYPE_ALIGN (t) = BITS_PER_UNIT;
965 TYPE_USER_ALIGN (t) = 0;
966 TYPE_MAIN_VARIANT (t) = t;
967 TYPE_CANONICAL (t) = t;
968
969 /* Default to no attributes for type, but let target change that. */
970 TYPE_ATTRIBUTES (t) = NULL_TREE;
971 targetm.set_default_type_attributes (t);
972
973 /* We have not yet computed the alias set for this type. */
974 TYPE_ALIAS_SET (t) = -1;
975 break;
976
977 case tcc_constant:
978 TREE_CONSTANT (t) = 1;
979 break;
980
981 case tcc_expression:
982 switch (code)
983 {
984 case INIT_EXPR:
985 case MODIFY_EXPR:
986 case VA_ARG_EXPR:
987 case PREDECREMENT_EXPR:
988 case PREINCREMENT_EXPR:
989 case POSTDECREMENT_EXPR:
990 case POSTINCREMENT_EXPR:
991 /* All of these have side-effects, no matter what their
992 operands are. */
993 TREE_SIDE_EFFECTS (t) = 1;
994 break;
995
996 default:
997 break;
998 }
999 break;
1000
1001 default:
1002 /* Other classes need no special treatment. */
1003 break;
1004 }
1005
1006 return t;
1007 }
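/* Illustrative usage (a sketch, not a quotation from elsewhere): most
   callers allocate the node and then fill in its operands, as
   build_case_label does further down in this file:

     tree t = make_node (CASE_LABEL_EXPR);
     TREE_TYPE (t) = void_type_node;
     CASE_LOW (t) = low_value;

   TREE_VEC, INTEGER_CST and OMP_CLAUSE nodes have dedicated constructors
   (e.g. make_tree_vec and make_int_cst below) because their sizes vary.  */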
1008 \f
1009 /* Return a new node with the same contents as NODE except that its
1010 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1011
1012 tree
1013 copy_node_stat (tree node MEM_STAT_DECL)
1014 {
1015 tree t;
1016 enum tree_code code = TREE_CODE (node);
1017 size_t length;
1018
1019 gcc_assert (code != STATEMENT_LIST);
1020
1021 length = tree_size (node);
1022 record_node_allocation_statistics (code, length);
1023 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1024 memcpy (t, node, length);
1025
1026 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1027 TREE_CHAIN (t) = 0;
1028 TREE_ASM_WRITTEN (t) = 0;
1029 TREE_VISITED (t) = 0;
1030
1031 if (TREE_CODE_CLASS (code) == tcc_declaration)
1032 {
1033 if (code == DEBUG_EXPR_DECL)
1034 DECL_UID (t) = --next_debug_decl_uid;
1035 else
1036 {
1037 DECL_UID (t) = allocate_decl_uid ();
1038 if (DECL_PT_UID_SET_P (node))
1039 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1040 }
1041 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1042 && DECL_HAS_VALUE_EXPR_P (node))
1043 {
1044 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1045 DECL_HAS_VALUE_EXPR_P (t) = 1;
1046 }
1047 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1048 if (TREE_CODE (node) == VAR_DECL)
1049 {
1050 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1051 t->decl_with_vis.symtab_node = NULL;
1052 }
1053 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1054 {
1055 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1056 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1057 }
1058 if (TREE_CODE (node) == FUNCTION_DECL)
1059 {
1060 DECL_STRUCT_FUNCTION (t) = NULL;
1061 t->decl_with_vis.symtab_node = NULL;
1062 }
1063 }
1064 else if (TREE_CODE_CLASS (code) == tcc_type)
1065 {
1066 TYPE_UID (t) = next_type_uid++;
1067 /* The following is so that the debug code for
1068 the copy is different from the original type.
1069 The two statements usually duplicate each other
1070 (because they clear fields of the same union),
1071 but the optimizer should catch that. */
1072 TYPE_SYMTAB_POINTER (t) = 0;
1073 TYPE_SYMTAB_ADDRESS (t) = 0;
1074
1075 /* Do not copy the values cache. */
1076 if (TYPE_CACHED_VALUES_P (t))
1077 {
1078 TYPE_CACHED_VALUES_P (t) = 0;
1079 TYPE_CACHED_VALUES (t) = NULL_TREE;
1080 }
1081 }
1082
1083 return t;
1084 }
1085
1086 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1087 For example, this can copy a list made of TREE_LIST nodes. */
1088
1089 tree
1090 copy_list (tree list)
1091 {
1092 tree head;
1093 tree prev, next;
1094
1095 if (list == 0)
1096 return 0;
1097
1098 head = prev = copy_node (list);
1099 next = TREE_CHAIN (list);
1100 while (next)
1101 {
1102 TREE_CHAIN (prev) = copy_node (next);
1103 prev = TREE_CHAIN (prev);
1104 next = TREE_CHAIN (next);
1105 }
1106 return head;
1107 }
1108
1109 \f
1110 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1111 INTEGER_CST with value CST and type TYPE. */
1112
1113 static unsigned int
1114 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1115 {
1116 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1117 /* We need an extra zero HWI if CST is an unsigned integer with its
1118 upper bit set, and if CST occupies a whole number of HWIs. */
1119 if (TYPE_UNSIGNED (type)
1120 && wi::neg_p (cst)
1121 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1122 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1123 return cst.get_len ();
1124 }
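/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT): for an
   unsigned 64-bit TYPE and CST == 0x8000000000000000, cst.get_len () is 1,
   but a single HWI would read back as a negative value, so one extra
   all-zero HWI is needed and this function returns 2.  */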
1125
1126 /* Return a new INTEGER_CST with value CST and type TYPE. */
1127
1128 static tree
1129 build_new_int_cst (tree type, const wide_int &cst)
1130 {
1131 unsigned int len = cst.get_len ();
1132 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1133 tree nt = make_int_cst (len, ext_len);
1134
1135 if (len < ext_len)
1136 {
1137 --ext_len;
1138 TREE_INT_CST_ELT (nt, ext_len) = 0;
1139 for (unsigned int i = len; i < ext_len; ++i)
1140 TREE_INT_CST_ELT (nt, i) = -1;
1141 }
1142 else if (TYPE_UNSIGNED (type)
1143 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1144 {
1145 len--;
1146 TREE_INT_CST_ELT (nt, len)
1147 = zext_hwi (cst.elt (len),
1148 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1149 }
1150
1151 for (unsigned int i = 0; i < len; i++)
1152 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1153 TREE_TYPE (nt) = type;
1154 return nt;
1155 }
1156
1157 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1158
1159 tree
1160 build_int_cst (tree type, HOST_WIDE_INT low)
1161 {
1162 /* Support legacy code. */
1163 if (!type)
1164 type = integer_type_node;
1165
1166 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1167 }
1168
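/* Create an INT_CST node with value CST zero extended to TYPE. */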
1169 tree
1170 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1171 {
1172 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1173 }
1174
1175 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1176
1177 tree
1178 build_int_cst_type (tree type, HOST_WIDE_INT low)
1179 {
1180 gcc_assert (type);
1181 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1182 }
1183
1184 /* Constructs a tree in type TYPE with the value given by CST. Signedness
1185 of CST is assumed to be the same as the signedness of TYPE. */
1186
1187 tree
1188 double_int_to_tree (tree type, double_int cst)
1189 {
1190 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1191 }
1192
1193 /* We force the wide_int CST to the range of the type TYPE by sign or
1194 zero extending it. OVERFLOWABLE indicates if we are interested in
1195 overflow of the value: when > 0 we are only interested in signed
1196 overflow, when < 0 we are interested in any overflow. OVERFLOWED
1197 indicates whether overflow has already occurred. We force the
1198 returned value to be within the range of TYPE (by setting to 0 or 1
1199 all the bits outside the type's range). We set TREE_OVERFLOW on the
1200 result if
1201 OVERFLOWED is nonzero,
1202 or OVERFLOWABLE is > 0 and signed overflow occurs,
1203 or OVERFLOWABLE is < 0 and any overflow occurs.
1204 We return a new tree node for the extended wide_int. The node
1205 is shared if no overflow flags are set. */
1206
1207
1208 tree
1209 force_fit_type (tree type, const wide_int_ref &cst,
1210 int overflowable, bool overflowed)
1211 {
1212 signop sign = TYPE_SIGN (type);
1213
1214 /* If we need to set overflow flags, return a new unshared node. */
1215 if (overflowed || !wi::fits_to_tree_p (cst, type))
1216 {
1217 if (overflowed
1218 || overflowable < 0
1219 || (overflowable > 0 && sign == SIGNED))
1220 {
1221 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1222 tree t = build_new_int_cst (type, tmp);
1223 TREE_OVERFLOW (t) = 1;
1224 return t;
1225 }
1226 }
1227
1228 /* Else build a shared node. */
1229 return wide_int_to_tree (type, cst);
1230 }
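/* Illustrative example (not a call that appears in this file, TYPE and CST
   are placeholders): if folding an addition in an 8-bit signed type yields
   the value 300, the value does not fit, so

     force_fit_type (type, cst, 1, false)

   returns a fresh, unshared INTEGER_CST holding 300 truncated to 8 bits
   (i.e. 44) with TREE_OVERFLOW set; when no overflow flag is needed, the
   shared node from wide_int_to_tree is returned instead.  */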
1231
1232 /* These are the hash table functions for the hash table of INTEGER_CST
1233 nodes of a sizetype. */
1234
1235 /* Return the hash code of X, an INTEGER_CST. */
1236
1237 static hashval_t
1238 int_cst_hash_hash (const void *x)
1239 {
1240 const_tree const t = (const_tree) x;
1241 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1242 int i;
1243
1244 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1245 code ^= TREE_INT_CST_ELT (t, i);
1246
1247 return code;
1248 }
1249
1250 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1251 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1252
1253 static int
1254 int_cst_hash_eq (const void *x, const void *y)
1255 {
1256 const_tree const xt = (const_tree) x;
1257 const_tree const yt = (const_tree) y;
1258
1259 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1260 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1261 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1262 return false;
1263
1264 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1265 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1266 return false;
1267
1268 return true;
1269 }
1270
1271 /* Create an INT_CST node of TYPE and value CST.
1272 The returned node is always shared. For small integers we use a
1273 per-type vector cache, for larger ones we use a single hash table.
1274 The value is extended from its precision according to the sign of
1275 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1276 the upper bits and ensures that hashing and value equality based
1277 upon the underlying HOST_WIDE_INTs works without masking. */
1278
1279 tree
1280 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1281 {
1282 tree t;
1283 int ix = -1;
1284 int limit = 0;
1285
1286 gcc_assert (type);
1287 unsigned int prec = TYPE_PRECISION (type);
1288 signop sgn = TYPE_SIGN (type);
1289
1290 /* Verify that everything is canonical. */
1291 int l = pcst.get_len ();
1292 if (l > 1)
1293 {
1294 if (pcst.elt (l - 1) == 0)
1295 gcc_checking_assert (pcst.elt (l - 2) < 0);
1296 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1297 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1298 }
1299
1300 wide_int cst = wide_int::from (pcst, prec, sgn);
1301 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1302
1303 if (ext_len == 1)
1304 {
1305 /* We just need to store a single HOST_WIDE_INT. */
1306 HOST_WIDE_INT hwi;
1307 if (TYPE_UNSIGNED (type))
1308 hwi = cst.to_uhwi ();
1309 else
1310 hwi = cst.to_shwi ();
1311
1312 switch (TREE_CODE (type))
1313 {
1314 case NULLPTR_TYPE:
1315 gcc_assert (hwi == 0);
1316 /* Fallthru. */
1317
1318 case POINTER_TYPE:
1319 case REFERENCE_TYPE:
1320 /* Cache NULL pointer. */
1321 if (hwi == 0)
1322 {
1323 limit = 1;
1324 ix = 0;
1325 }
1326 break;
1327
1328 case BOOLEAN_TYPE:
1329 /* Cache false or true. */
1330 limit = 2;
1331 if (hwi < 2)
1332 ix = hwi;
1333 break;
1334
1335 case INTEGER_TYPE:
1336 case OFFSET_TYPE:
1337 if (TYPE_SIGN (type) == UNSIGNED)
1338 {
1339 /* Cache [0, N). */
1340 limit = INTEGER_SHARE_LIMIT;
1341 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1342 ix = hwi;
1343 }
1344 else
1345 {
1346 /* Cache [-1, N). */
1347 limit = INTEGER_SHARE_LIMIT + 1;
1348 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1349 ix = hwi + 1;
1350 }
1351 break;
1352
1353 case ENUMERAL_TYPE:
1354 break;
1355
1356 default:
1357 gcc_unreachable ();
1358 }
1359
1360 if (ix >= 0)
1361 {
1362 /* Look for it in the type's vector of small shared ints. */
1363 if (!TYPE_CACHED_VALUES_P (type))
1364 {
1365 TYPE_CACHED_VALUES_P (type) = 1;
1366 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1367 }
1368
1369 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1370 if (t)
1371 /* Make sure no one is clobbering the shared constant. */
1372 gcc_checking_assert (TREE_TYPE (t) == type
1373 && TREE_INT_CST_NUNITS (t) == 1
1374 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1375 && TREE_INT_CST_EXT_NUNITS (t) == 1
1376 && TREE_INT_CST_ELT (t, 0) == hwi);
1377 else
1378 {
1379 /* Create a new shared int. */
1380 t = build_new_int_cst (type, cst);
1381 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1382 }
1383 }
1384 else
1385 {
1386 /* Use the cache of larger shared ints, using int_cst_node as
1387 a temporary. */
1388 void **slot;
1389
1390 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1391 TREE_TYPE (int_cst_node) = type;
1392
1393 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1394 t = (tree) *slot;
1395 if (!t)
1396 {
1397 /* Insert this one into the hash table. */
1398 t = int_cst_node;
1399 *slot = t;
1400 /* Make a new node for next time round. */
1401 int_cst_node = make_int_cst (1, 1);
1402 }
1403 }
1404 }
1405 else
1406 {
1407 /* The value either hashes properly or we drop it on the floor
1408 for the gc to take care of. There will not be enough of them
1409 to worry about. */
1410 void **slot;
1411
1412 tree nt = build_new_int_cst (type, cst);
1413 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1414 t = (tree) *slot;
1415 if (!t)
1416 {
1417 /* Insert this one into the hash table. */
1418 t = nt;
1419 *slot = t;
1420 }
1421 }
1422
1423 return t;
1424 }
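/* Sharing in practice (an illustrative sketch):

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);

   Both calls reach this function; 7 falls in the small-value range, so A
   and B are the very same node, taken from TYPE_CACHED_VALUES of
   integer_type_node.  A large value outside that range is looked up in
   int_cst_hash_table instead, but identical requests still share a node.  */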
1425
1426 void
1427 cache_integer_cst (tree t)
1428 {
1429 tree type = TREE_TYPE (t);
1430 int ix = -1;
1431 int limit = 0;
1432 int prec = TYPE_PRECISION (type);
1433
1434 gcc_assert (!TREE_OVERFLOW (t));
1435
1436 switch (TREE_CODE (type))
1437 {
1438 case NULLPTR_TYPE:
1439 gcc_assert (integer_zerop (t));
1440 /* Fallthru. */
1441
1442 case POINTER_TYPE:
1443 case REFERENCE_TYPE:
1444 /* Cache NULL pointer. */
1445 if (integer_zerop (t))
1446 {
1447 limit = 1;
1448 ix = 0;
1449 }
1450 break;
1451
1452 case BOOLEAN_TYPE:
1453 /* Cache false or true. */
1454 limit = 2;
1455 if (wi::ltu_p (t, 2))
1456 ix = TREE_INT_CST_ELT (t, 0);
1457 break;
1458
1459 case INTEGER_TYPE:
1460 case OFFSET_TYPE:
1461 if (TYPE_UNSIGNED (type))
1462 {
1463 /* Cache 0..N */
1464 limit = INTEGER_SHARE_LIMIT;
1465
1466 /* This is a little hokey, but if the prec is smaller than
1467 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1468 obvious test will not get the correct answer. */
1469 if (prec < HOST_BITS_PER_WIDE_INT)
1470 {
1471 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1472 ix = tree_to_uhwi (t);
1473 }
1474 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1475 ix = tree_to_uhwi (t);
1476 }
1477 else
1478 {
1479 /* Cache -1..N */
1480 limit = INTEGER_SHARE_LIMIT + 1;
1481
1482 if (integer_minus_onep (t))
1483 ix = 0;
1484 else if (!wi::neg_p (t))
1485 {
1486 if (prec < HOST_BITS_PER_WIDE_INT)
1487 {
1488 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1489 ix = tree_to_shwi (t) + 1;
1490 }
1491 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1492 ix = tree_to_shwi (t) + 1;
1493 }
1494 }
1495 break;
1496
1497 case ENUMERAL_TYPE:
1498 break;
1499
1500 default:
1501 gcc_unreachable ();
1502 }
1503
1504 if (ix >= 0)
1505 {
1506 /* Look for it in the type's vector of small shared ints. */
1507 if (!TYPE_CACHED_VALUES_P (type))
1508 {
1509 TYPE_CACHED_VALUES_P (type) = 1;
1510 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1511 }
1512
1513 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1514 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1515 }
1516 else
1517 {
1518 /* Use the cache of larger shared ints. */
1519 void **slot;
1520
1521 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1522 /* If there is already an entry for the number verify it's the
1523 same. */
1524 if (*slot)
1525 gcc_assert (wi::eq_p (tree (*slot), t));
1526 else
1527 /* Otherwise insert this one into the hash table. */
1528 *slot = t;
1529 }
1530 }
1531
1532
1533 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1534 and the rest are zeros. */
1535
1536 tree
1537 build_low_bits_mask (tree type, unsigned bits)
1538 {
1539 gcc_assert (bits <= TYPE_PRECISION (type));
1540
1541 return wide_int_to_tree (type, wi::mask (bits, false,
1542 TYPE_PRECISION (type)));
1543 }
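/* For example (illustrative), build_low_bits_mask (unsigned_type_node, 4)
   yields the constant 0xf: wi::mask (bits, false, prec) sets exactly the
   low BITS bits and clears the rest.  */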
1544
1545 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1546 HOST_WIDE_INT without loss of precision. */
1547
1548 bool
1549 cst_and_fits_in_hwi (const_tree x)
1550 {
1551 if (TREE_CODE (x) != INTEGER_CST)
1552 return false;
1553
1554 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1555 return false;
1556
1557 return TREE_INT_CST_NUNITS (x) == 1;
1558 }
1559
1560 /* Build a newly constructed VECTOR_CST node with LEN elements. */
1561
1562 tree
1563 make_vector_stat (unsigned len MEM_STAT_DECL)
1564 {
1565 tree t;
1566 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1567
1568 record_node_allocation_statistics (VECTOR_CST, length);
1569
1570 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1571
1572 TREE_SET_CODE (t, VECTOR_CST);
1573 TREE_CONSTANT (t) = 1;
1574
1575 return t;
1576 }
1577
1578 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1579 are in the array pointed to by VALS. */
1580
1581 tree
1582 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1583 {
1584 int over = 0;
1585 unsigned cnt = 0;
1586 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1587 TREE_TYPE (v) = type;
1588
1589 /* Iterate through elements and check for overflow. */
1590 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1591 {
1592 tree value = vals[cnt];
1593
1594 VECTOR_CST_ELT (v, cnt) = value;
1595
1596 /* Don't crash if we get an address constant. */
1597 if (!CONSTANT_CLASS_P (value))
1598 continue;
1599
1600 over |= TREE_OVERFLOW (value);
1601 }
1602
1603 TREE_OVERFLOW (v) = over;
1604 return v;
1605 }
1606
1607 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1608 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1609
1610 tree
1611 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1612 {
1613 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1614 unsigned HOST_WIDE_INT idx;
1615 tree value;
1616
1617 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1618 vec[idx] = value;
1619 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1620 vec[idx] = build_zero_cst (TREE_TYPE (type));
1621
1622 return build_vector (type, vec);
1623 }
1624
1625 /* Build a vector of type VECTYPE where all the elements are SCs. */
1626 tree
1627 build_vector_from_val (tree vectype, tree sc)
1628 {
1629 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1630
1631 if (sc == error_mark_node)
1632 return sc;
1633
1634 /* Verify that the vector type is suitable for SC. Note that there
1635 is some inconsistency in the type-system with respect to restrict
1636 qualifications of pointers. Vector types always have a main-variant
1637 element type and the qualification is applied to the vector-type.
1638 So TREE_TYPE (vector-type) does not return a properly qualified
1639 vector element-type. */
1640 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1641 TREE_TYPE (vectype)));
1642
1643 if (CONSTANT_CLASS_P (sc))
1644 {
1645 tree *v = XALLOCAVEC (tree, nunits);
1646 for (i = 0; i < nunits; ++i)
1647 v[i] = sc;
1648 return build_vector (vectype, v);
1649 }
1650 else
1651 {
1652 vec<constructor_elt, va_gc> *v;
1653 vec_alloc (v, nunits);
1654 for (i = 0; i < nunits; ++i)
1655 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1656 return build_constructor (vectype, v);
1657 }
1658 }
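/* Illustrative sketch, assuming VECTYPE came from something like
   build_vector_type (integer_type_node, 4):

     tree ones = build_vector_from_val (vectype, integer_one_node);

   Because integer_one_node satisfies CONSTANT_CLASS_P, the result is a
   VECTOR_CST of four ones; a non-constant SC would instead yield a
   CONSTRUCTOR with one element per vector lane.  */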
1659
1660 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1661 are in the vec pointed to by VALS. */
1662 tree
1663 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1664 {
1665 tree c = make_node (CONSTRUCTOR);
1666 unsigned int i;
1667 constructor_elt *elt;
1668 bool constant_p = true;
1669 bool side_effects_p = false;
1670
1671 TREE_TYPE (c) = type;
1672 CONSTRUCTOR_ELTS (c) = vals;
1673
1674 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1675 {
1676 /* Mostly ctors will have elts that don't have side-effects, so
1677 the usual case is to scan all the elements. Hence a single
1678 loop for both const and side effects, rather than one loop
1679 each (with early outs). */
1680 if (!TREE_CONSTANT (elt->value))
1681 constant_p = false;
1682 if (TREE_SIDE_EFFECTS (elt->value))
1683 side_effects_p = true;
1684 }
1685
1686 TREE_SIDE_EFFECTS (c) = side_effects_p;
1687 TREE_CONSTANT (c) = constant_p;
1688
1689 return c;
1690 }
1691
1692 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1693 INDEX and VALUE. */
1694 tree
1695 build_constructor_single (tree type, tree index, tree value)
1696 {
1697 vec<constructor_elt, va_gc> *v;
1698 constructor_elt elt = {index, value};
1699
1700 vec_alloc (v, 1);
1701 v->quick_push (elt);
1702
1703 return build_constructor (type, v);
1704 }
1705
1706
1707 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1708 are in a list pointed to by VALS. */
1709 tree
1710 build_constructor_from_list (tree type, tree vals)
1711 {
1712 tree t;
1713 vec<constructor_elt, va_gc> *v = NULL;
1714
1715 if (vals)
1716 {
1717 vec_alloc (v, list_length (vals));
1718 for (t = vals; t; t = TREE_CHAIN (t))
1719 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1720 }
1721
1722 return build_constructor (type, v);
1723 }
1724
1725 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1726 of elements, provided as index/value pairs. */
1727
1728 tree
1729 build_constructor_va (tree type, int nelts, ...)
1730 {
1731 vec<constructor_elt, va_gc> *v = NULL;
1732 va_list p;
1733
1734 va_start (p, nelts);
1735 vec_alloc (v, nelts);
1736 while (nelts--)
1737 {
1738 tree index = va_arg (p, tree);
1739 tree value = va_arg (p, tree);
1740 CONSTRUCTOR_APPEND_ELT (v, index, value);
1741 }
1742 va_end (p);
1743 return build_constructor (type, v);
1744 }
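/* Illustrative call (INDEX0/VALUE0 etc. are placeholders, not names
   defined here):

     tree ctor = build_constructor_va (type, 2,
                                       index0, value0,
                                       index1, value1);

   Each index is typically a FIELD_DECL for a RECORD_TYPE or an INTEGER_CST
   position for an ARRAY_TYPE, and may be NULL_TREE for "next position".  */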
1745
1746 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1747
1748 tree
1749 build_fixed (tree type, FIXED_VALUE_TYPE f)
1750 {
1751 tree v;
1752 FIXED_VALUE_TYPE *fp;
1753
1754 v = make_node (FIXED_CST);
1755 fp = ggc_alloc<fixed_value> ();
1756 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1757
1758 TREE_TYPE (v) = type;
1759 TREE_FIXED_CST_PTR (v) = fp;
1760 return v;
1761 }
1762
1763 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1764
1765 tree
1766 build_real (tree type, REAL_VALUE_TYPE d)
1767 {
1768 tree v;
1769 REAL_VALUE_TYPE *dp;
1770 int overflow = 0;
1771
1772 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1773 Consider doing it via real_convert now. */
1774
1775 v = make_node (REAL_CST);
1776 dp = ggc_alloc<real_value> ();
1777 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1778
1779 TREE_TYPE (v) = type;
1780 TREE_REAL_CST_PTR (v) = dp;
1781 TREE_OVERFLOW (v) = overflow;
1782 return v;
1783 }
1784
1785 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1786 node I, converted to the floating-point format of TYPE. */
1787
1788 REAL_VALUE_TYPE
1789 real_value_from_int_cst (const_tree type, const_tree i)
1790 {
1791 REAL_VALUE_TYPE d;
1792
1793 /* Clear all bits of the real value type so that we can later do
1794 bitwise comparisons to see if two values are the same. */
1795 memset (&d, 0, sizeof d);
1796
1797 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1798 TYPE_SIGN (TREE_TYPE (i)));
1799 return d;
1800 }
1801
1802 /* Given a tree representing an integer constant I, return a tree
1803 representing the same value as a floating-point constant of type TYPE. */
1804
1805 tree
1806 build_real_from_int_cst (tree type, const_tree i)
1807 {
1808 tree v;
1809 int overflow = TREE_OVERFLOW (i);
1810
1811 v = build_real (type, real_value_from_int_cst (type, i));
1812
1813 TREE_OVERFLOW (v) |= overflow;
1814 return v;
1815 }
1816
1817 /* Return a newly constructed STRING_CST node whose value is
1818 the LEN characters at STR.
1819 Note that for a C string literal, LEN should include the trailing NUL.
1820 The TREE_TYPE is not initialized. */
1821
1822 tree
1823 build_string (int len, const char *str)
1824 {
1825 tree s;
1826 size_t length;
1827
1828 /* Do not waste bytes provided by padding of struct tree_string. */
1829 length = len + offsetof (struct tree_string, str) + 1;
1830
1831 record_node_allocation_statistics (STRING_CST, length);
1832
1833 s = (tree) ggc_internal_alloc (length);
1834
1835 memset (s, 0, sizeof (struct tree_typed));
1836 TREE_SET_CODE (s, STRING_CST);
1837 TREE_CONSTANT (s) = 1;
1838 TREE_STRING_LENGTH (s) = len;
1839 memcpy (s->string.str, str, len);
1840 s->string.str[len] = '\0';
1841
1842 return s;
1843 }
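/* For example, a front end representing the C literal "abc" would call

     tree s = build_string (4, "abc");

   (LEN counts the trailing NUL), and must then set TREE_TYPE (s) itself,
   typically to an array-of-char type.  */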
1844
1845 /* Return a newly constructed COMPLEX_CST node whose value is
1846 specified by the real and imaginary parts REAL and IMAG.
1847 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1848 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1849
1850 tree
1851 build_complex (tree type, tree real, tree imag)
1852 {
1853 tree t = make_node (COMPLEX_CST);
1854
1855 TREE_REALPART (t) = real;
1856 TREE_IMAGPART (t) = imag;
1857 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1858 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1859 return t;
1860 }
1861
1862 /* Return a constant of arithmetic type TYPE which is the
1863 multiplicative identity of the set TYPE. */
1864
1865 tree
1866 build_one_cst (tree type)
1867 {
1868 switch (TREE_CODE (type))
1869 {
1870 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1871 case POINTER_TYPE: case REFERENCE_TYPE:
1872 case OFFSET_TYPE:
1873 return build_int_cst (type, 1);
1874
1875 case REAL_TYPE:
1876 return build_real (type, dconst1);
1877
1878 case FIXED_POINT_TYPE:
1879 /* We can only generate 1 for accum types. */
1880 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1881 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1882
1883 case VECTOR_TYPE:
1884 {
1885 tree scalar = build_one_cst (TREE_TYPE (type));
1886
1887 return build_vector_from_val (type, scalar);
1888 }
1889
1890 case COMPLEX_TYPE:
1891 return build_complex (type,
1892 build_one_cst (TREE_TYPE (type)),
1893 build_zero_cst (TREE_TYPE (type)));
1894
1895 default:
1896 gcc_unreachable ();
1897 }
1898 }
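/* So for scalar types this is simply the constant 1, for a COMPLEX_TYPE it
   is 1 + 0i, and for a VECTOR_TYPE it is a splat of the element type's
   one-constant, built via build_vector_from_val above.  */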
1899
1900 /* Return an integer of type TYPE containing all 1's in as much precision as
1901 it contains, or a complex or vector whose subparts are such integers. */
1902
1903 tree
1904 build_all_ones_cst (tree type)
1905 {
1906 if (TREE_CODE (type) == COMPLEX_TYPE)
1907 {
1908 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1909 return build_complex (type, scalar, scalar);
1910 }
1911 else
1912 return build_minus_one_cst (type);
1913 }
1914
1915 /* Return a constant of arithmetic type TYPE which is the
1916 opposite of the multiplicative identity of the set TYPE. */
1917
1918 tree
1919 build_minus_one_cst (tree type)
1920 {
1921 switch (TREE_CODE (type))
1922 {
1923 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1924 case POINTER_TYPE: case REFERENCE_TYPE:
1925 case OFFSET_TYPE:
1926 return build_int_cst (type, -1);
1927
1928 case REAL_TYPE:
1929 return build_real (type, dconstm1);
1930
1931 case FIXED_POINT_TYPE:
1932 /* We can only generate -1 for accum types. */
1933 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1934 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1935 TYPE_MODE (type)));
1936
1937 case VECTOR_TYPE:
1938 {
1939 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1940
1941 return build_vector_from_val (type, scalar);
1942 }
1943
1944 case COMPLEX_TYPE:
1945 return build_complex (type,
1946 build_minus_one_cst (TREE_TYPE (type)),
1947 build_zero_cst (TREE_TYPE (type)));
1948
1949 default:
1950 gcc_unreachable ();
1951 }
1952 }
1953
1954 /* Build 0 constant of type TYPE. This is used by constructor folding
1955 and thus the constant should be represented in memory by
1956 zero(es). */
1957
1958 tree
1959 build_zero_cst (tree type)
1960 {
1961 switch (TREE_CODE (type))
1962 {
1963 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1964 case POINTER_TYPE: case REFERENCE_TYPE:
1965 case OFFSET_TYPE: case NULLPTR_TYPE:
1966 return build_int_cst (type, 0);
1967
1968 case REAL_TYPE:
1969 return build_real (type, dconst0);
1970
1971 case FIXED_POINT_TYPE:
1972 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1973
1974 case VECTOR_TYPE:
1975 {
1976 tree scalar = build_zero_cst (TREE_TYPE (type));
1977
1978 return build_vector_from_val (type, scalar);
1979 }
1980
1981 case COMPLEX_TYPE:
1982 {
1983 tree zero = build_zero_cst (TREE_TYPE (type));
1984
1985 return build_complex (type, zero, zero);
1986 }
1987
1988 default:
1989 if (!AGGREGATE_TYPE_P (type))
1990 return fold_convert (type, integer_zero_node);
1991 return build_constructor (type, NULL);
1992 }
1993 }
1994
1995
1996 /* Build a BINFO with BASE_BINFOS base binfo slots. */
1997
1998 tree
1999 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2000 {
2001 tree t;
2002 size_t length = (offsetof (struct tree_binfo, base_binfos)
2003 + vec<tree, va_gc>::embedded_size (base_binfos));
2004
2005 record_node_allocation_statistics (TREE_BINFO, length);
2006
2007 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2008
2009 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2010
2011 TREE_SET_CODE (t, TREE_BINFO);
2012
2013 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2014
2015 return t;
2016 }
2017
2018 /* Create a CASE_LABEL_EXPR tree node and return it. */
2019
2020 tree
2021 build_case_label (tree low_value, tree high_value, tree label_decl)
2022 {
2023 tree t = make_node (CASE_LABEL_EXPR);
2024
2025 TREE_TYPE (t) = void_type_node;
2026 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2027
2028 CASE_LOW (t) = low_value;
2029 CASE_HIGH (t) = high_value;
2030 CASE_LABEL (t) = label_decl;
2031 CASE_CHAIN (t) = NULL_TREE;
2032
2033 return t;
2034 }
2035
2036 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2037 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2038 The latter determines the length of the HOST_WIDE_INT vector. */
2039
2040 tree
2041 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2042 {
2043 tree t;
2044 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2045 + sizeof (struct tree_int_cst));
2046
2047 gcc_assert (len);
2048 record_node_allocation_statistics (INTEGER_CST, length);
2049
2050 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2051
2052 TREE_SET_CODE (t, INTEGER_CST);
2053 TREE_INT_CST_NUNITS (t) = len;
2054 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2055 /* to_offset can only be applied to trees that are offset_int-sized
2056 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2057 must be exactly the precision of offset_int and so LEN is correct. */
2058 if (ext_len <= OFFSET_INT_ELTS)
2059 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2060 else
2061 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2062
2063 TREE_CONSTANT (t) = 1;
2064
2065 return t;
2066 }
2067
2068 /* Build a newly constructed TREE_VEC node of length LEN. */
2069
2070 tree
2071 make_tree_vec_stat (int len MEM_STAT_DECL)
2072 {
2073 tree t;
2074 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2075
2076 record_node_allocation_statistics (TREE_VEC, length);
2077
2078 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2079
2080 TREE_SET_CODE (t, TREE_VEC);
2081 TREE_VEC_LENGTH (t) = len;
2082
2083 return t;
2084 }
2085
2086 /* Grow a TREE_VEC node to new length LEN. */
2087
2088 tree
2089 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2090 {
2091 gcc_assert (TREE_CODE (v) == TREE_VEC);
2092
2093 int oldlen = TREE_VEC_LENGTH (v);
2094 gcc_assert (len > oldlen);
2095
2096 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2097 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098
2099 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2100
2101 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2102
2103 TREE_VEC_LENGTH (v) = len;
2104
2105 return v;
2106 }
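
/* A minimal sketch of the usual allocation pattern for TREE_VEC nodes,
   assuming hypothetical trees E0, E1 and E2 supplied by the caller:

       tree v = make_tree_vec (3);
       TREE_VEC_ELT (v, 0) = e0;
       TREE_VEC_ELT (v, 1) = e1;
       TREE_VEC_ELT (v, 2) = e2;
       gcc_assert (TREE_VEC_LENGTH (v) == 3);

   make_tree_vec is the usual entry point wrapping make_tree_vec_stat
   above; grow_tree_vec can later extend V when more slots are needed.  */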
2107 \f
2108 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2109 constant in which every element is zero. */
2110
2111 int
2112 integer_zerop (const_tree expr)
2113 {
2114 STRIP_NOPS (expr);
2115
2116 switch (TREE_CODE (expr))
2117 {
2118 case INTEGER_CST:
2119 return wi::eq_p (expr, 0);
2120 case COMPLEX_CST:
2121 return (integer_zerop (TREE_REALPART (expr))
2122 && integer_zerop (TREE_IMAGPART (expr)));
2123 case VECTOR_CST:
2124 {
2125 unsigned i;
2126 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2127 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2128 return false;
2129 return true;
2130 }
2131 default:
2132 return false;
2133 }
2134 }
2135
2136 /* Return 1 if EXPR is the integer constant one, the corresponding complex
2137 constant, or a vector constant in which every element is one. */
2138
2139 int
2140 integer_onep (const_tree expr)
2141 {
2142 STRIP_NOPS (expr);
2143
2144 switch (TREE_CODE (expr))
2145 {
2146 case INTEGER_CST:
2147 return wi::eq_p (wi::to_widest (expr), 1);
2148 case COMPLEX_CST:
2149 return (integer_onep (TREE_REALPART (expr))
2150 && integer_zerop (TREE_IMAGPART (expr)));
2151 case VECTOR_CST:
2152 {
2153 unsigned i;
2154 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2155 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2156 return false;
2157 return true;
2158 }
2159 default:
2160 return false;
2161 }
2162 }
2163
2164 /* Return 1 if EXPR is an integer constant in which all bits within its
2165 precision are set, or a complex or vector whose subparts are such integers. */
2166
2167 int
2168 integer_all_onesp (const_tree expr)
2169 {
2170 STRIP_NOPS (expr);
2171
2172 if (TREE_CODE (expr) == COMPLEX_CST
2173 && integer_all_onesp (TREE_REALPART (expr))
2174 && integer_all_onesp (TREE_IMAGPART (expr)))
2175 return 1;
2176
2177 else if (TREE_CODE (expr) == VECTOR_CST)
2178 {
2179 unsigned i;
2180 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2181 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2182 return 0;
2183 return 1;
2184 }
2185
2186 else if (TREE_CODE (expr) != INTEGER_CST)
2187 return 0;
2188
2189 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2190 }
2191
2192 /* Return 1 if EXPR is the integer constant minus one. */
2193
2194 int
2195 integer_minus_onep (const_tree expr)
2196 {
2197 STRIP_NOPS (expr);
2198
2199 if (TREE_CODE (expr) == COMPLEX_CST)
2200 return (integer_all_onesp (TREE_REALPART (expr))
2201 && integer_zerop (TREE_IMAGPART (expr)));
2202 else
2203 return integer_all_onesp (expr);
2204 }
2205
2206 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2207 one bit on). */
2208
2209 int
2210 integer_pow2p (const_tree expr)
2211 {
2212 STRIP_NOPS (expr);
2213
2214 if (TREE_CODE (expr) == COMPLEX_CST
2215 && integer_pow2p (TREE_REALPART (expr))
2216 && integer_zerop (TREE_IMAGPART (expr)))
2217 return 1;
2218
2219 if (TREE_CODE (expr) != INTEGER_CST)
2220 return 0;
2221
2222 return wi::popcount (expr) == 1;
2223 }
2224
2225 /* Return 1 if EXPR is an integer constant other than zero or a
2226 complex constant other than zero. */
2227
2228 int
2229 integer_nonzerop (const_tree expr)
2230 {
2231 STRIP_NOPS (expr);
2232
2233 return ((TREE_CODE (expr) == INTEGER_CST
2234 && !wi::eq_p (expr, 0))
2235 || (TREE_CODE (expr) == COMPLEX_CST
2236 && (integer_nonzerop (TREE_REALPART (expr))
2237 || integer_nonzerop (TREE_IMAGPART (expr)))));
2238 }
2239
2240 /* Return 1 if EXPR is the fixed-point constant zero. */
2241
2242 int
2243 fixed_zerop (const_tree expr)
2244 {
2245 return (TREE_CODE (expr) == FIXED_CST
2246 && TREE_FIXED_CST (expr).data.is_zero ());
2247 }
2248
2249 /* Return the base-2 exponent of a tree node known to be a
2250 power of two. */
2251
2252 int
2253 tree_log2 (const_tree expr)
2254 {
2255 STRIP_NOPS (expr);
2256
2257 if (TREE_CODE (expr) == COMPLEX_CST)
2258 return tree_log2 (TREE_REALPART (expr));
2259
2260 return wi::exact_log2 (expr);
2261 }
2262
2263 /* Similar, but return the largest integer Y such that 2 ** Y is less
2264 than or equal to EXPR. */
2265
2266 int
2267 tree_floor_log2 (const_tree expr)
2268 {
2269 STRIP_NOPS (expr);
2270
2271 if (TREE_CODE (expr) == COMPLEX_CST)
2272 return tree_log2 (TREE_REALPART (expr));
2273
2274 return wi::floor_log2 (expr);
2275 }
2276
2277 /* Return number of known trailing zero bits in EXPR, or, if the value of
2278 EXPR is known to be zero, the precision of its type. */
2279
2280 unsigned int
2281 tree_ctz (const_tree expr)
2282 {
2283 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2284 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2285 return 0;
2286
2287 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2288 switch (TREE_CODE (expr))
2289 {
2290 case INTEGER_CST:
2291 ret1 = wi::ctz (expr);
2292 return MIN (ret1, prec);
2293 case SSA_NAME:
2294 ret1 = wi::ctz (get_nonzero_bits (expr));
2295 return MIN (ret1, prec);
2296 case PLUS_EXPR:
2297 case MINUS_EXPR:
2298 case BIT_IOR_EXPR:
2299 case BIT_XOR_EXPR:
2300 case MIN_EXPR:
2301 case MAX_EXPR:
2302 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2303 if (ret1 == 0)
2304 return ret1;
2305 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2306 return MIN (ret1, ret2);
2307 case POINTER_PLUS_EXPR:
2308 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2309 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2310 /* Second operand is sizetype, which could be in theory
2311 wider than pointer's precision. Make sure we never
2312 return more than prec. */
2313 ret2 = MIN (ret2, prec);
2314 return MIN (ret1, ret2);
2315 case BIT_AND_EXPR:
2316 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2317 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2318 return MAX (ret1, ret2);
2319 case MULT_EXPR:
2320 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2321 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2322 return MIN (ret1 + ret2, prec);
2323 case LSHIFT_EXPR:
2324 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2325 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2326 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2327 {
2328 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2329 return MIN (ret1 + ret2, prec);
2330 }
2331 return ret1;
2332 case RSHIFT_EXPR:
2333 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2334 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2335 {
2336 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2337 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2338 if (ret1 > ret2)
2339 return ret1 - ret2;
2340 }
2341 return 0;
2342 case TRUNC_DIV_EXPR:
2343 case CEIL_DIV_EXPR:
2344 case FLOOR_DIV_EXPR:
2345 case ROUND_DIV_EXPR:
2346 case EXACT_DIV_EXPR:
2347 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2348 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2349 {
2350 int l = tree_log2 (TREE_OPERAND (expr, 1));
2351 if (l >= 0)
2352 {
2353 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2354 ret2 = l;
2355 if (ret1 > ret2)
2356 return ret1 - ret2;
2357 }
2358 }
2359 return 0;
2360 CASE_CONVERT:
2361 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2362 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2363 ret1 = prec;
2364 return MIN (ret1, prec);
2365 case SAVE_EXPR:
2366 return tree_ctz (TREE_OPERAND (expr, 0));
2367 case COND_EXPR:
2368 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2369 if (ret1 == 0)
2370 return 0;
2371 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2372 return MIN (ret1, ret2);
2373 case COMPOUND_EXPR:
2374 return tree_ctz (TREE_OPERAND (expr, 1));
2375 case ADDR_EXPR:
2376 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2377 if (ret1 > BITS_PER_UNIT)
2378 {
2379 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2380 return MIN (ret1, prec);
2381 }
2382 return 0;
2383 default:
2384 return 0;
2385 }
2386 }
2387
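/* A worked example of how the recursion in tree_ctz composes, assuming X
   is an SSA_NAME whose get_nonzero_bits mask has two trailing zero bits:

       x * 8     ->  MIN (2 + 3, prec)        (MULT_EXPR case)
       x << 4    ->  MIN (2 + 4, prec)        (LSHIFT_EXPR case)
       x & y     ->  MAX (ctz (x), ctz (y))   (BIT_AND_EXPR case)

   i.e. multiplications and left shifts add trailing zeros, while for a
   bitwise AND the better-aligned operand wins.  */
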
2388 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2389 decimal float constants, so don't return 1 for them. */
2390
2391 int
2392 real_zerop (const_tree expr)
2393 {
2394 STRIP_NOPS (expr);
2395
2396 switch (TREE_CODE (expr))
2397 {
2398 case REAL_CST:
2399 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2400 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2401 case COMPLEX_CST:
2402 return real_zerop (TREE_REALPART (expr))
2403 && real_zerop (TREE_IMAGPART (expr));
2404 case VECTOR_CST:
2405 {
2406 unsigned i;
2407 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2408 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2409 return false;
2410 return true;
2411 }
2412 default:
2413 return false;
2414 }
2415 }
2416
2417 /* Return 1 if EXPR is the real constant one in real or complex form.
2418 Trailing zeroes matter for decimal float constants, so don't return
2419 1 for them. */
2420
2421 int
2422 real_onep (const_tree expr)
2423 {
2424 STRIP_NOPS (expr);
2425
2426 switch (TREE_CODE (expr))
2427 {
2428 case REAL_CST:
2429 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2430 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2431 case COMPLEX_CST:
2432 return real_onep (TREE_REALPART (expr))
2433 && real_zerop (TREE_IMAGPART (expr));
2434 case VECTOR_CST:
2435 {
2436 unsigned i;
2437 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2438 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2439 return false;
2440 return true;
2441 }
2442 default:
2443 return false;
2444 }
2445 }
2446
2447 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2448 matter for decimal float constants, so don't return 1 for them. */
2449
2450 int
2451 real_minus_onep (const_tree expr)
2452 {
2453 STRIP_NOPS (expr);
2454
2455 switch (TREE_CODE (expr))
2456 {
2457 case REAL_CST:
2458 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2459 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2460 case COMPLEX_CST:
2461 return real_minus_onep (TREE_REALPART (expr))
2462 && real_zerop (TREE_IMAGPART (expr));
2463 case VECTOR_CST:
2464 {
2465 unsigned i;
2466 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2467 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2468 return false;
2469 return true;
2470 }
2471 default:
2472 return false;
2473 }
2474 }
2475
2476 /* Nonzero if EXP is a constant or a cast of a constant. */
2477
2478 int
2479 really_constant_p (const_tree exp)
2480 {
2481 /* This is not quite the same as STRIP_NOPS. It does more. */
2482 while (CONVERT_EXPR_P (exp)
2483 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2484 exp = TREE_OPERAND (exp, 0);
2485 return TREE_CONSTANT (exp);
2486 }
2487 \f
2488 /* Return first list element whose TREE_VALUE is ELEM.
2489 Return 0 if ELEM is not in LIST. */
2490
2491 tree
2492 value_member (tree elem, tree list)
2493 {
2494 while (list)
2495 {
2496 if (elem == TREE_VALUE (list))
2497 return list;
2498 list = TREE_CHAIN (list);
2499 }
2500 return NULL_TREE;
2501 }
2502
2503 /* Return first list element whose TREE_PURPOSE is ELEM.
2504 Return 0 if ELEM is not in LIST. */
2505
2506 tree
2507 purpose_member (const_tree elem, tree list)
2508 {
2509 while (list)
2510 {
2511 if (elem == TREE_PURPOSE (list))
2512 return list;
2513 list = TREE_CHAIN (list);
2514 }
2515 return NULL_TREE;
2516 }
2517
2518 /* Return true if ELEM is in V. */
2519
2520 bool
2521 vec_member (const_tree elem, vec<tree, va_gc> *v)
2522 {
2523 unsigned ix;
2524 tree t;
2525 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2526 if (elem == t)
2527 return true;
2528 return false;
2529 }
2530
2531 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2532 NULL_TREE. */
2533
2534 tree
2535 chain_index (int idx, tree chain)
2536 {
2537 for (; chain && idx > 0; --idx)
2538 chain = TREE_CHAIN (chain);
2539 return chain;
2540 }
2541
2542 /* Return nonzero if ELEM is part of the chain CHAIN. */
2543
2544 int
2545 chain_member (const_tree elem, const_tree chain)
2546 {
2547 while (chain)
2548 {
2549 if (elem == chain)
2550 return 1;
2551 chain = DECL_CHAIN (chain);
2552 }
2553
2554 return 0;
2555 }
2556
2557 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2558 We expect a null pointer to mark the end of the chain.
2559 This is the Lisp primitive `length'. */
2560
2561 int
2562 list_length (const_tree t)
2563 {
2564 const_tree p = t;
2565 #ifdef ENABLE_TREE_CHECKING
2566 const_tree q = t;
2567 #endif
2568 int len = 0;
2569
2570 while (p)
2571 {
2572 p = TREE_CHAIN (p);
2573 #ifdef ENABLE_TREE_CHECKING
2574 if (len % 2)
2575 q = TREE_CHAIN (q);
2576 gcc_assert (p != q);
2577 #endif
2578 len++;
2579 }
2580
2581 return len;
2582 }
2583
2584 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2585 UNION_TYPE TYPE, or NULL_TREE if none. */
2586
2587 tree
2588 first_field (const_tree type)
2589 {
2590 tree t = TYPE_FIELDS (type);
2591 while (t && TREE_CODE (t) != FIELD_DECL)
2592 t = TREE_CHAIN (t);
2593 return t;
2594 }
2595
2596 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2597 by modifying the last node in chain 1 to point to chain 2.
2598 This is the Lisp primitive `nconc'. */
2599
2600 tree
2601 chainon (tree op1, tree op2)
2602 {
2603 tree t1;
2604
2605 if (!op1)
2606 return op2;
2607 if (!op2)
2608 return op1;
2609
2610 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2611 continue;
2612 TREE_CHAIN (t1) = op2;
2613
2614 #ifdef ENABLE_TREE_CHECKING
2615 {
2616 tree t2;
2617 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2618 gcc_assert (t2 != t1);
2619 }
2620 #endif
2621
2622 return op1;
2623 }
2624
2625 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2626
2627 tree
2628 tree_last (tree chain)
2629 {
2630 tree next;
2631 if (chain)
2632 while ((next = TREE_CHAIN (chain)))
2633 chain = next;
2634 return chain;
2635 }
2636
2637 /* Reverse the order of elements in the chain T,
2638 and return the new head of the chain (old last element). */
2639
2640 tree
2641 nreverse (tree t)
2642 {
2643 tree prev = 0, decl, next;
2644 for (decl = t; decl; decl = next)
2645 {
2646 /* We shouldn't be using this function to reverse BLOCK chains; we
2647 have blocks_nreverse for that. */
2648 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2649 next = TREE_CHAIN (decl);
2650 TREE_CHAIN (decl) = prev;
2651 prev = decl;
2652 }
2653 return prev;
2654 }
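
/* A minimal sketch of how the chain primitives above compose, assuming
   hypothetical trees PURPOSE and VALUE supplied by the caller: elements
   are usually pushed onto the front with tree_cons and the chain is
   reversed once at the end,

       tree list = NULL_TREE;
       list = tree_cons (purpose, value, list);
       list = nreverse (list);
       gcc_assert (list_length (list) == 1);

   chainon splices two such chains together destructively, by rewriting
   the last TREE_CHAIN of its first argument.  */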
2655 \f
2656 /* Return a newly created TREE_LIST node whose
2657 purpose and value fields are PARM and VALUE. */
2658
2659 tree
2660 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2661 {
2662 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2663 TREE_PURPOSE (t) = parm;
2664 TREE_VALUE (t) = value;
2665 return t;
2666 }
2667
2668 /* Build a chain of TREE_LIST nodes from a vector. */
2669
2670 tree
2671 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2672 {
2673 tree ret = NULL_TREE;
2674 tree *pp = &ret;
2675 unsigned int i;
2676 tree t;
2677 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2678 {
2679 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2680 pp = &TREE_CHAIN (*pp);
2681 }
2682 return ret;
2683 }
2684
2685 /* Return a newly created TREE_LIST node whose
2686 purpose and value fields are PURPOSE and VALUE
2687 and whose TREE_CHAIN is CHAIN. */
2688
2689 tree
2690 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2691 {
2692 tree node;
2693
2694 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2695 memset (node, 0, sizeof (struct tree_common));
2696
2697 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2698
2699 TREE_SET_CODE (node, TREE_LIST);
2700 TREE_CHAIN (node) = chain;
2701 TREE_PURPOSE (node) = purpose;
2702 TREE_VALUE (node) = value;
2703 return node;
2704 }
2705
2706 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2707 trees. */
2708
2709 vec<tree, va_gc> *
2710 ctor_to_vec (tree ctor)
2711 {
2712 vec<tree, va_gc> *vec;
2713 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2714 unsigned int ix;
2715 tree val;
2716
2717 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2718 vec->quick_push (val);
2719
2720 return vec;
2721 }
2722 \f
2723 /* Return the size nominally occupied by an object of type TYPE
2724 when it resides in memory. The value is measured in units of bytes,
2725 and its data type is that normally used for type sizes
2726 (which is the first type created by make_signed_type or
2727 make_unsigned_type). */
2728
2729 tree
2730 size_in_bytes (const_tree type)
2731 {
2732 tree t;
2733
2734 if (type == error_mark_node)
2735 return integer_zero_node;
2736
2737 type = TYPE_MAIN_VARIANT (type);
2738 t = TYPE_SIZE_UNIT (type);
2739
2740 if (t == 0)
2741 {
2742 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2743 return size_zero_node;
2744 }
2745
2746 return t;
2747 }
2748
2749 /* Return the size of TYPE (in bytes) as a wide integer
2750 or return -1 if the size can vary or is larger than an integer. */
2751
2752 HOST_WIDE_INT
2753 int_size_in_bytes (const_tree type)
2754 {
2755 tree t;
2756
2757 if (type == error_mark_node)
2758 return 0;
2759
2760 type = TYPE_MAIN_VARIANT (type);
2761 t = TYPE_SIZE_UNIT (type);
2762
2763 if (t && tree_fits_uhwi_p (t))
2764 return TREE_INT_CST_LOW (t);
2765 else
2766 return -1;
2767 }
2768
2769 /* Return the maximum size of TYPE (in bytes) as a wide integer
2770 or return -1 if the size can vary or is larger than an integer. */
2771
2772 HOST_WIDE_INT
2773 max_int_size_in_bytes (const_tree type)
2774 {
2775 HOST_WIDE_INT size = -1;
2776 tree size_tree;
2777
2778 /* If this is an array type, check for a possible MAX_SIZE attached. */
2779
2780 if (TREE_CODE (type) == ARRAY_TYPE)
2781 {
2782 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2783
2784 if (size_tree && tree_fits_uhwi_p (size_tree))
2785 size = tree_to_uhwi (size_tree);
2786 }
2787
2788 /* If we still haven't been able to get a size, see if the language
2789 can compute a maximum size. */
2790
2791 if (size == -1)
2792 {
2793 size_tree = lang_hooks.types.max_size (type);
2794
2795 if (size_tree && tree_fits_uhwi_p (size_tree))
2796 size = tree_to_uhwi (size_tree);
2797 }
2798
2799 return size;
2800 }
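
/* A small sketch of the intended use of the two size queries above,
   assuming a hypothetical caller that wants a fixed-size temporary for an
   object of type TYPE:

       HOST_WIDE_INT size = int_size_in_bytes (type);
       if (size == -1)
         size = max_int_size_in_bytes (type);

   A result of -1 consistently means "unknown, variable, or not
   representable in a HOST_WIDE_INT", never an actual size, so callers
   must check it before using the value.  */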
2801 \f
2802 /* Return the bit position of FIELD, in bits from the start of the record.
2803 This is a tree of type bitsizetype. */
2804
2805 tree
2806 bit_position (const_tree field)
2807 {
2808 return bit_from_pos (DECL_FIELD_OFFSET (field),
2809 DECL_FIELD_BIT_OFFSET (field));
2810 }
2811
2812 /* Likewise, but return it as an integer. It must be representable in
2813 that way (since it could be a signed value, we don't have the
2814 option of returning -1 like int_size_in_bytes can). */
2815
2816 HOST_WIDE_INT
2817 int_bit_position (const_tree field)
2818 {
2819 return tree_to_shwi (bit_position (field));
2820 }
2821 \f
2822 /* Return the byte position of FIELD, in bytes from the start of the record.
2823 This is a tree of type sizetype. */
2824
2825 tree
2826 byte_position (const_tree field)
2827 {
2828 return byte_from_pos (DECL_FIELD_OFFSET (field),
2829 DECL_FIELD_BIT_OFFSET (field));
2830 }
2831
2832 /* Likewise, but return it as an integer. It must be representable in
2833 that way (since it could be a signed value, we don't have the
2834 option of returning -1 like int_size_in_bytes can). */
2835
2836 HOST_WIDE_INT
2837 int_byte_position (const_tree field)
2838 {
2839 return tree_to_shwi (byte_position (field));
2840 }
2841 \f
2842 /* Return the strictest alignment, in bits, that T is known to have. */
2843
2844 unsigned int
2845 expr_align (const_tree t)
2846 {
2847 unsigned int align0, align1;
2848
2849 switch (TREE_CODE (t))
2850 {
2851 CASE_CONVERT: case NON_LVALUE_EXPR:
2852 /* If we have conversions, we know that the alignment of the
2853 object must meet each of the alignments of the types. */
2854 align0 = expr_align (TREE_OPERAND (t, 0));
2855 align1 = TYPE_ALIGN (TREE_TYPE (t));
2856 return MAX (align0, align1);
2857
2858 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2859 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2860 case CLEANUP_POINT_EXPR:
2861 /* These don't change the alignment of an object. */
2862 return expr_align (TREE_OPERAND (t, 0));
2863
2864 case COND_EXPR:
2865 /* The best we can do is say that the alignment is the least aligned
2866 of the two arms. */
2867 align0 = expr_align (TREE_OPERAND (t, 1));
2868 align1 = expr_align (TREE_OPERAND (t, 2));
2869 return MIN (align0, align1);
2870
2871 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2872 meaningfully; it's always 1. */
2873 case LABEL_DECL: case CONST_DECL:
2874 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2875 case FUNCTION_DECL:
2876 gcc_assert (DECL_ALIGN (t) != 0);
2877 return DECL_ALIGN (t);
2878
2879 default:
2880 break;
2881 }
2882
2883 /* Otherwise take the alignment from that of the type. */
2884 return TYPE_ALIGN (TREE_TYPE (t));
2885 }
2886 \f
2887 /* Return, as a tree node, the number of elements for TYPE (which is an
2888 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2889
2890 tree
2891 array_type_nelts (const_tree type)
2892 {
2893 tree index_type, min, max;
2894
2895 /* If they did it with unspecified bounds, then we should have already
2896 given an error about it before we got here. */
2897 if (! TYPE_DOMAIN (type))
2898 return error_mark_node;
2899
2900 index_type = TYPE_DOMAIN (type);
2901 min = TYPE_MIN_VALUE (index_type);
2902 max = TYPE_MAX_VALUE (index_type);
2903
2904 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2905 if (!max)
2906 return error_mark_node;
2907
2908 return (integer_zerop (min)
2909 ? max
2910 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2911 }
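
/* For example, for the C array type "int a[10]" the domain is [0, 9], so
   array_type_nelts returns the INTEGER_CST 9. A caller wanting the
   element count itself would add one (a sketch, with N the value returned
   above):

       tree nelts = fold_build2 (PLUS_EXPR, TREE_TYPE (n), n,
                                 build_one_cst (TREE_TYPE (n)));  */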
2912 \f
2913 /* If arg is static -- a reference to an object in static storage -- then
2914 return the object. This is not the same as the C meaning of `static'.
2915 If arg isn't static, return NULL. */
2916
2917 tree
2918 staticp (tree arg)
2919 {
2920 switch (TREE_CODE (arg))
2921 {
2922 case FUNCTION_DECL:
2923 /* Nested functions are static, even though taking their address will
2924 involve a trampoline as we unnest the nested function and create
2925 the trampoline on the tree level. */
2926 return arg;
2927
2928 case VAR_DECL:
2929 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2930 && ! DECL_THREAD_LOCAL_P (arg)
2931 && ! DECL_DLLIMPORT_P (arg)
2932 ? arg : NULL);
2933
2934 case CONST_DECL:
2935 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2936 ? arg : NULL);
2937
2938 case CONSTRUCTOR:
2939 return TREE_STATIC (arg) ? arg : NULL;
2940
2941 case LABEL_DECL:
2942 case STRING_CST:
2943 return arg;
2944
2945 case COMPONENT_REF:
2946 /* If the thing being referenced is not a field, then it is
2947 something language specific. */
2948 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2949
2950 /* If we are referencing a bitfield, we can't evaluate an
2951 ADDR_EXPR at compile time and so it isn't a constant. */
2952 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2953 return NULL;
2954
2955 return staticp (TREE_OPERAND (arg, 0));
2956
2957 case BIT_FIELD_REF:
2958 return NULL;
2959
2960 case INDIRECT_REF:
2961 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2962
2963 case ARRAY_REF:
2964 case ARRAY_RANGE_REF:
2965 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2966 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2967 return staticp (TREE_OPERAND (arg, 0));
2968 else
2969 return NULL;
2970
2971 case COMPOUND_LITERAL_EXPR:
2972 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2973
2974 default:
2975 return NULL;
2976 }
2977 }
2978
2979 \f
2980
2981
2982 /* Return whether OP is a DECL whose address is function-invariant. */
2983
2984 bool
2985 decl_address_invariant_p (const_tree op)
2986 {
2987 /* The conditions below are slightly less strict than the one in
2988 staticp. */
2989
2990 switch (TREE_CODE (op))
2991 {
2992 case PARM_DECL:
2993 case RESULT_DECL:
2994 case LABEL_DECL:
2995 case FUNCTION_DECL:
2996 return true;
2997
2998 case VAR_DECL:
2999 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3000 || DECL_THREAD_LOCAL_P (op)
3001 || DECL_CONTEXT (op) == current_function_decl
3002 || decl_function_context (op) == current_function_decl)
3003 return true;
3004 break;
3005
3006 case CONST_DECL:
3007 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3008 || decl_function_context (op) == current_function_decl)
3009 return true;
3010 break;
3011
3012 default:
3013 break;
3014 }
3015
3016 return false;
3017 }
3018
3019 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3020
3021 bool
3022 decl_address_ip_invariant_p (const_tree op)
3023 {
3024 /* The conditions below are slightly less strict than the one in
3025 staticp. */
3026
3027 switch (TREE_CODE (op))
3028 {
3029 case LABEL_DECL:
3030 case FUNCTION_DECL:
3031 case STRING_CST:
3032 return true;
3033
3034 case VAR_DECL:
3035 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3036 && !DECL_DLLIMPORT_P (op))
3037 || DECL_THREAD_LOCAL_P (op))
3038 return true;
3039 break;
3040
3041 case CONST_DECL:
3042 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3043 return true;
3044 break;
3045
3046 default:
3047 break;
3048 }
3049
3050 return false;
3051 }
3052
3053
3054 /* Return true if T is function-invariant (internal function, does
3055 not handle arithmetic; that's handled in skip_simple_arithmetic and
3056 tree_invariant_p). */
3057
3058 static bool tree_invariant_p (tree t);
3059
3060 static bool
3061 tree_invariant_p_1 (tree t)
3062 {
3063 tree op;
3064
3065 if (TREE_CONSTANT (t)
3066 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3067 return true;
3068
3069 switch (TREE_CODE (t))
3070 {
3071 case SAVE_EXPR:
3072 return true;
3073
3074 case ADDR_EXPR:
3075 op = TREE_OPERAND (t, 0);
3076 while (handled_component_p (op))
3077 {
3078 switch (TREE_CODE (op))
3079 {
3080 case ARRAY_REF:
3081 case ARRAY_RANGE_REF:
3082 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3083 || TREE_OPERAND (op, 2) != NULL_TREE
3084 || TREE_OPERAND (op, 3) != NULL_TREE)
3085 return false;
3086 break;
3087
3088 case COMPONENT_REF:
3089 if (TREE_OPERAND (op, 2) != NULL_TREE)
3090 return false;
3091 break;
3092
3093 default:;
3094 }
3095 op = TREE_OPERAND (op, 0);
3096 }
3097
3098 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3099
3100 default:
3101 break;
3102 }
3103
3104 return false;
3105 }
3106
3107 /* Return true if T is function-invariant. */
3108
3109 static bool
3110 tree_invariant_p (tree t)
3111 {
3112 tree inner = skip_simple_arithmetic (t);
3113 return tree_invariant_p_1 (inner);
3114 }
3115
3116 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3117 Do this to any expression which may be used in more than one place,
3118 but must be evaluated only once.
3119
3120 Normally, expand_expr would reevaluate the expression each time.
3121 Calling save_expr produces something that is evaluated and recorded
3122 the first time expand_expr is called on it. Subsequent calls to
3123 expand_expr just reuse the recorded value.
3124
3125 The call to expand_expr that generates code that actually computes
3126 the value is the first call *at compile time*. Subsequent calls
3127 *at compile time* generate code to use the saved value.
3128 This produces the correct result provided that *at run time* control
3129 always flows through the insns made by the first expand_expr
3130 before reaching the other places where the save_expr was evaluated.
3131 You, the caller of save_expr, must make sure this is so.
3132
3133 Constants, and certain read-only nodes, are returned with no
3134 SAVE_EXPR because that is safe. Expressions containing placeholders
3135 are not touched; see tree.def for an explanation of what these
3136 are used for. */
3137
3138 tree
3139 save_expr (tree expr)
3140 {
3141 tree t = fold (expr);
3142 tree inner;
3143
3144 /* If the tree evaluates to a constant, then we don't want to hide that
3145 fact (i.e. this allows further folding, and direct checks for constants).
3146 However, a read-only object that has side effects cannot be bypassed.
3147 Since it is no problem to reevaluate literals, we just return the
3148 literal node. */
3149 inner = skip_simple_arithmetic (t);
3150 if (TREE_CODE (inner) == ERROR_MARK)
3151 return inner;
3152
3153 if (tree_invariant_p_1 (inner))
3154 return t;
3155
3156 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3157 it means that the size or offset of some field of an object depends on
3158 the value within another field.
3159
3160 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3161 and some variable since it would then need to be both evaluated once and
3162 evaluated more than once. Front-ends must assure this case cannot
3163 happen by surrounding any such subexpressions in their own SAVE_EXPR
3164 and forcing evaluation at the proper time. */
3165 if (contains_placeholder_p (inner))
3166 return t;
3167
3168 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3169 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3170
3171 /* This expression might be placed ahead of a jump to ensure that the
3172 value was computed on both sides of the jump. So make sure it isn't
3173 eliminated as dead. */
3174 TREE_SIDE_EFFECTS (t) = 1;
3175 return t;
3176 }
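
/* A minimal sketch of the intended use, assuming a front end lowering a
   hypothetical absolute-value construct in which EXPR is needed twice but
   must be evaluated only once:

       tree val = save_expr (expr);
       tree neg = fold_build1 (NEGATE_EXPR, TREE_TYPE (val), val);
       tree cond = fold_build2 (LT_EXPR, boolean_type_node, val,
                                build_zero_cst (TREE_TYPE (val)));
       tree res = fold_build3 (COND_EXPR, TREE_TYPE (val), cond, neg, val);

   Both uses of VAL refer to the same SAVE_EXPR node, so expand_expr
   computes EXPR once and subsequent uses reuse the saved value.  */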
3177
3178 /* Look inside EXPR into any simple arithmetic operations. Return the
3179 outermost non-arithmetic or non-invariant node. */
3180
3181 tree
3182 skip_simple_arithmetic (tree expr)
3183 {
3184 /* We don't care about whether this can be used as an lvalue in this
3185 context. */
3186 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3187 expr = TREE_OPERAND (expr, 0);
3188
3189 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3190 a constant, it will be more efficient to not make another SAVE_EXPR since
3191 it will allow better simplification and GCSE will be able to merge the
3192 computations if they actually occur. */
3193 while (true)
3194 {
3195 if (UNARY_CLASS_P (expr))
3196 expr = TREE_OPERAND (expr, 0);
3197 else if (BINARY_CLASS_P (expr))
3198 {
3199 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3200 expr = TREE_OPERAND (expr, 0);
3201 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3202 expr = TREE_OPERAND (expr, 1);
3203 else
3204 break;
3205 }
3206 else
3207 break;
3208 }
3209
3210 return expr;
3211 }
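
/* A worked example, assuming S is an existing SAVE_EXPR: for the
   expression (S + 4) * 2, each binary operation has an invariant second
   operand, so skip_simple_arithmetic peels both off and returns S itself.
   save_expr then sees that the interesting part is already wrapped and
   does not create a second SAVE_EXPR.  */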
3212
3213 /* Look inside EXPR into simple arithmetic operations involving constants.
3214 Return the outermost non-arithmetic or non-constant node. */
3215
3216 tree
3217 skip_simple_constant_arithmetic (tree expr)
3218 {
3219 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3220 expr = TREE_OPERAND (expr, 0);
3221
3222 while (true)
3223 {
3224 if (UNARY_CLASS_P (expr))
3225 expr = TREE_OPERAND (expr, 0);
3226 else if (BINARY_CLASS_P (expr))
3227 {
3228 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3229 expr = TREE_OPERAND (expr, 0);
3230 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3231 expr = TREE_OPERAND (expr, 1);
3232 else
3233 break;
3234 }
3235 else
3236 break;
3237 }
3238
3239 return expr;
3240 }
3241
3242 /* Return which tree structure is used by T. */
3243
3244 enum tree_node_structure_enum
3245 tree_node_structure (const_tree t)
3246 {
3247 const enum tree_code code = TREE_CODE (t);
3248 return tree_node_structure_for_code (code);
3249 }
3250
3251 /* Set various status flags when building a CALL_EXPR object T. */
3252
3253 static void
3254 process_call_operands (tree t)
3255 {
3256 bool side_effects = TREE_SIDE_EFFECTS (t);
3257 bool read_only = false;
3258 int i = call_expr_flags (t);
3259
3260 /* Calls have side-effects, except those to const or pure functions. */
3261 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3262 side_effects = true;
3263 /* Propagate TREE_READONLY of arguments for const functions. */
3264 if (i & ECF_CONST)
3265 read_only = true;
3266
3267 if (!side_effects || read_only)
3268 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3269 {
3270 tree op = TREE_OPERAND (t, i);
3271 if (op && TREE_SIDE_EFFECTS (op))
3272 side_effects = true;
3273 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3274 read_only = false;
3275 }
3276
3277 TREE_SIDE_EFFECTS (t) = side_effects;
3278 TREE_READONLY (t) = read_only;
3279 }
3280 \f
3281 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3282 size or offset that depends on a field within a record. */
3283
3284 bool
3285 contains_placeholder_p (const_tree exp)
3286 {
3287 enum tree_code code;
3288
3289 if (!exp)
3290 return 0;
3291
3292 code = TREE_CODE (exp);
3293 if (code == PLACEHOLDER_EXPR)
3294 return 1;
3295
3296 switch (TREE_CODE_CLASS (code))
3297 {
3298 case tcc_reference:
3299 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3300 position computations, since they will be converted into a
3301 WITH_RECORD_EXPR involving the reference, which we assume
3302 here will be valid. */
3303 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3304
3305 case tcc_exceptional:
3306 if (code == TREE_LIST)
3307 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3308 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3309 break;
3310
3311 case tcc_unary:
3312 case tcc_binary:
3313 case tcc_comparison:
3314 case tcc_expression:
3315 switch (code)
3316 {
3317 case COMPOUND_EXPR:
3318 /* Ignoring the first operand isn't quite right, but works best. */
3319 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3320
3321 case COND_EXPR:
3322 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3323 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3324 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3325
3326 case SAVE_EXPR:
3327 /* The save_expr function never wraps anything containing
3328 a PLACEHOLDER_EXPR. */
3329 return 0;
3330
3331 default:
3332 break;
3333 }
3334
3335 switch (TREE_CODE_LENGTH (code))
3336 {
3337 case 1:
3338 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3339 case 2:
3340 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3341 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3342 default:
3343 return 0;
3344 }
3345
3346 case tcc_vl_exp:
3347 switch (code)
3348 {
3349 case CALL_EXPR:
3350 {
3351 const_tree arg;
3352 const_call_expr_arg_iterator iter;
3353 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3354 if (CONTAINS_PLACEHOLDER_P (arg))
3355 return 1;
3356 return 0;
3357 }
3358 default:
3359 return 0;
3360 }
3361
3362 default:
3363 return 0;
3364 }
3365 return 0;
3366 }
3367
3368 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3369 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3370 field positions. */
3371
3372 static bool
3373 type_contains_placeholder_1 (const_tree type)
3374 {
3375 /* If the size contains a placeholder or the parent type (component type in
3376 the case of arrays) involves a placeholder, this type does. */
3377 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3378 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3379 || (!POINTER_TYPE_P (type)
3380 && TREE_TYPE (type)
3381 && type_contains_placeholder_p (TREE_TYPE (type))))
3382 return true;
3383
3384 /* Now do type-specific checks. Note that the last part of the check above
3385 greatly limits what we have to do below. */
3386 switch (TREE_CODE (type))
3387 {
3388 case VOID_TYPE:
3389 case COMPLEX_TYPE:
3390 case ENUMERAL_TYPE:
3391 case BOOLEAN_TYPE:
3392 case POINTER_TYPE:
3393 case OFFSET_TYPE:
3394 case REFERENCE_TYPE:
3395 case METHOD_TYPE:
3396 case FUNCTION_TYPE:
3397 case VECTOR_TYPE:
3398 case NULLPTR_TYPE:
3399 return false;
3400
3401 case INTEGER_TYPE:
3402 case REAL_TYPE:
3403 case FIXED_POINT_TYPE:
3404 /* Here we just check the bounds. */
3405 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3406 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3407
3408 case ARRAY_TYPE:
3409 /* We have already checked the component type above, so just check the
3410 domain type. */
3411 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3412
3413 case RECORD_TYPE:
3414 case UNION_TYPE:
3415 case QUAL_UNION_TYPE:
3416 {
3417 tree field;
3418
3419 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3420 if (TREE_CODE (field) == FIELD_DECL
3421 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3422 || (TREE_CODE (type) == QUAL_UNION_TYPE
3423 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3424 || type_contains_placeholder_p (TREE_TYPE (field))))
3425 return true;
3426
3427 return false;
3428 }
3429
3430 default:
3431 gcc_unreachable ();
3432 }
3433 }
3434
3435 /* Wrapper around above function used to cache its result. */
3436
3437 bool
3438 type_contains_placeholder_p (tree type)
3439 {
3440 bool result;
3441
3442 /* If the contains_placeholder_bits field has been initialized,
3443 then we know the answer. */
3444 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3445 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3446
3447 /* Indicate that we've seen this type node, and the answer is false.
3448 This is what we want to return if we run into recursion via fields. */
3449 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3450
3451 /* Compute the real value. */
3452 result = type_contains_placeholder_1 (type);
3453
3454 /* Store the real value. */
3455 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3456
3457 return result;
3458 }
3459 \f
3460 /* Push tree EXP onto vector QUEUE if it is not already present. */
3461
3462 static void
3463 push_without_duplicates (tree exp, vec<tree> *queue)
3464 {
3465 unsigned int i;
3466 tree iter;
3467
3468 FOR_EACH_VEC_ELT (*queue, i, iter)
3469 if (simple_cst_equal (iter, exp) == 1)
3470 break;
3471
3472 if (!iter)
3473 queue->safe_push (exp);
3474 }
3475
3476 /* Given a tree EXP, find all occurrences of references to fields
3477 in a PLACEHOLDER_EXPR and place them in vector REFS without
3478 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3479 we assume here that EXP contains only arithmetic expressions
3480 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3481 argument list. */
3482
3483 void
3484 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3485 {
3486 enum tree_code code = TREE_CODE (exp);
3487 tree inner;
3488 int i;
3489
3490 /* We handle TREE_LIST and COMPONENT_REF separately. */
3491 if (code == TREE_LIST)
3492 {
3493 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3494 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3495 }
3496 else if (code == COMPONENT_REF)
3497 {
3498 for (inner = TREE_OPERAND (exp, 0);
3499 REFERENCE_CLASS_P (inner);
3500 inner = TREE_OPERAND (inner, 0))
3501 ;
3502
3503 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3504 push_without_duplicates (exp, refs);
3505 else
3506 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3507 }
3508 else
3509 switch (TREE_CODE_CLASS (code))
3510 {
3511 case tcc_constant:
3512 break;
3513
3514 case tcc_declaration:
3515 /* Variables allocated to static storage can stay. */
3516 if (!TREE_STATIC (exp))
3517 push_without_duplicates (exp, refs);
3518 break;
3519
3520 case tcc_expression:
3521 /* This is the pattern built in ada/make_aligning_type. */
3522 if (code == ADDR_EXPR
3523 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3524 {
3525 push_without_duplicates (exp, refs);
3526 break;
3527 }
3528
3529 /* Fall through... */
3530
3531 case tcc_exceptional:
3532 case tcc_unary:
3533 case tcc_binary:
3534 case tcc_comparison:
3535 case tcc_reference:
3536 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3537 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3538 break;
3539
3540 case tcc_vl_exp:
3541 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3542 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3543 break;
3544
3545 default:
3546 gcc_unreachable ();
3547 }
3548 }
3549
3550 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3551 return a tree with all occurrences of references to F in a
3552 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3553 CONST_DECLs. Note that we assume here that EXP contains only
3554 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3555 occurring only in their argument list. */
3556
3557 tree
3558 substitute_in_expr (tree exp, tree f, tree r)
3559 {
3560 enum tree_code code = TREE_CODE (exp);
3561 tree op0, op1, op2, op3;
3562 tree new_tree;
3563
3564 /* We handle TREE_LIST and COMPONENT_REF separately. */
3565 if (code == TREE_LIST)
3566 {
3567 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3568 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3569 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3570 return exp;
3571
3572 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3573 }
3574 else if (code == COMPONENT_REF)
3575 {
3576 tree inner;
3577
3578 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3579 and it is the right field, replace it with R. */
3580 for (inner = TREE_OPERAND (exp, 0);
3581 REFERENCE_CLASS_P (inner);
3582 inner = TREE_OPERAND (inner, 0))
3583 ;
3584
3585 /* The field. */
3586 op1 = TREE_OPERAND (exp, 1);
3587
3588 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3589 return r;
3590
3591 /* If this expression hasn't been completed yet, leave it alone. */
3592 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3593 return exp;
3594
3595 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3596 if (op0 == TREE_OPERAND (exp, 0))
3597 return exp;
3598
3599 new_tree
3600 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3601 }
3602 else
3603 switch (TREE_CODE_CLASS (code))
3604 {
3605 case tcc_constant:
3606 return exp;
3607
3608 case tcc_declaration:
3609 if (exp == f)
3610 return r;
3611 else
3612 return exp;
3613
3614 case tcc_expression:
3615 if (exp == f)
3616 return r;
3617
3618 /* Fall through... */
3619
3620 case tcc_exceptional:
3621 case tcc_unary:
3622 case tcc_binary:
3623 case tcc_comparison:
3624 case tcc_reference:
3625 switch (TREE_CODE_LENGTH (code))
3626 {
3627 case 0:
3628 return exp;
3629
3630 case 1:
3631 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3632 if (op0 == TREE_OPERAND (exp, 0))
3633 return exp;
3634
3635 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3636 break;
3637
3638 case 2:
3639 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3640 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3641
3642 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3643 return exp;
3644
3645 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3646 break;
3647
3648 case 3:
3649 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3650 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3651 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3652
3653 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3654 && op2 == TREE_OPERAND (exp, 2))
3655 return exp;
3656
3657 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3658 break;
3659
3660 case 4:
3661 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3662 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3663 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3664 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3665
3666 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3667 && op2 == TREE_OPERAND (exp, 2)
3668 && op3 == TREE_OPERAND (exp, 3))
3669 return exp;
3670
3671 new_tree
3672 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3673 break;
3674
3675 default:
3676 gcc_unreachable ();
3677 }
3678 break;
3679
3680 case tcc_vl_exp:
3681 {
3682 int i;
3683
3684 new_tree = NULL_TREE;
3685
3686 /* If we are trying to replace F with a constant, inline back
3687 functions which do nothing else than computing a value from
3688 the arguments they are passed. This makes it possible to
3689 fold partially or entirely the replacement expression. */
3690 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3691 {
3692 tree t = maybe_inline_call_in_expr (exp);
3693 if (t)
3694 return SUBSTITUTE_IN_EXPR (t, f, r);
3695 }
3696
3697 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3698 {
3699 tree op = TREE_OPERAND (exp, i);
3700 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3701 if (new_op != op)
3702 {
3703 if (!new_tree)
3704 new_tree = copy_node (exp);
3705 TREE_OPERAND (new_tree, i) = new_op;
3706 }
3707 }
3708
3709 if (new_tree)
3710 {
3711 new_tree = fold (new_tree);
3712 if (TREE_CODE (new_tree) == CALL_EXPR)
3713 process_call_operands (new_tree);
3714 }
3715 else
3716 return exp;
3717 }
3718 break;
3719
3720 default:
3721 gcc_unreachable ();
3722 }
3723
3724 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3725
3726 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3727 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3728
3729 return new_tree;
3730 }
3731
3732 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3733 for it within OBJ, a tree that is an object or a chain of references. */
3734
3735 tree
3736 substitute_placeholder_in_expr (tree exp, tree obj)
3737 {
3738 enum tree_code code = TREE_CODE (exp);
3739 tree op0, op1, op2, op3;
3740 tree new_tree;
3741
3742 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3743 in the chain of OBJ. */
3744 if (code == PLACEHOLDER_EXPR)
3745 {
3746 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3747 tree elt;
3748
3749 for (elt = obj; elt != 0;
3750 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3751 || TREE_CODE (elt) == COND_EXPR)
3752 ? TREE_OPERAND (elt, 1)
3753 : (REFERENCE_CLASS_P (elt)
3754 || UNARY_CLASS_P (elt)
3755 || BINARY_CLASS_P (elt)
3756 || VL_EXP_CLASS_P (elt)
3757 || EXPRESSION_CLASS_P (elt))
3758 ? TREE_OPERAND (elt, 0) : 0))
3759 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3760 return elt;
3761
3762 for (elt = obj; elt != 0;
3763 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3764 || TREE_CODE (elt) == COND_EXPR)
3765 ? TREE_OPERAND (elt, 1)
3766 : (REFERENCE_CLASS_P (elt)
3767 || UNARY_CLASS_P (elt)
3768 || BINARY_CLASS_P (elt)
3769 || VL_EXP_CLASS_P (elt)
3770 || EXPRESSION_CLASS_P (elt))
3771 ? TREE_OPERAND (elt, 0) : 0))
3772 if (POINTER_TYPE_P (TREE_TYPE (elt))
3773 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3774 == need_type))
3775 return fold_build1 (INDIRECT_REF, need_type, elt);
3776
3777 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3778 survives until RTL generation, there will be an error. */
3779 return exp;
3780 }
3781
3782 /* TREE_LIST is special because we need to look at TREE_VALUE
3783 and TREE_CHAIN, not TREE_OPERANDS. */
3784 else if (code == TREE_LIST)
3785 {
3786 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3787 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3788 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3789 return exp;
3790
3791 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3792 }
3793 else
3794 switch (TREE_CODE_CLASS (code))
3795 {
3796 case tcc_constant:
3797 case tcc_declaration:
3798 return exp;
3799
3800 case tcc_exceptional:
3801 case tcc_unary:
3802 case tcc_binary:
3803 case tcc_comparison:
3804 case tcc_expression:
3805 case tcc_reference:
3806 case tcc_statement:
3807 switch (TREE_CODE_LENGTH (code))
3808 {
3809 case 0:
3810 return exp;
3811
3812 case 1:
3813 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3814 if (op0 == TREE_OPERAND (exp, 0))
3815 return exp;
3816
3817 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3818 break;
3819
3820 case 2:
3821 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3822 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3823
3824 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3825 return exp;
3826
3827 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3828 break;
3829
3830 case 3:
3831 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3832 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3833 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3834
3835 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3836 && op2 == TREE_OPERAND (exp, 2))
3837 return exp;
3838
3839 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3840 break;
3841
3842 case 4:
3843 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3844 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3845 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3846 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3847
3848 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3849 && op2 == TREE_OPERAND (exp, 2)
3850 && op3 == TREE_OPERAND (exp, 3))
3851 return exp;
3852
3853 new_tree
3854 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3855 break;
3856
3857 default:
3858 gcc_unreachable ();
3859 }
3860 break;
3861
3862 case tcc_vl_exp:
3863 {
3864 int i;
3865
3866 new_tree = NULL_TREE;
3867
3868 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3869 {
3870 tree op = TREE_OPERAND (exp, i);
3871 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3872 if (new_op != op)
3873 {
3874 if (!new_tree)
3875 new_tree = copy_node (exp);
3876 TREE_OPERAND (new_tree, i) = new_op;
3877 }
3878 }
3879
3880 if (new_tree)
3881 {
3882 new_tree = fold (new_tree);
3883 if (TREE_CODE (new_tree) == CALL_EXPR)
3884 process_call_operands (new_tree);
3885 }
3886 else
3887 return exp;
3888 }
3889 break;
3890
3891 default:
3892 gcc_unreachable ();
3893 }
3894
3895 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3896
3897 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3898 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3899
3900 return new_tree;
3901 }
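
/* A small sketch of how the placeholder machinery above is meant to be
   used, assuming a hypothetical type TYPE whose TYPE_SIZE refers to one
   of its own fields through a PLACEHOLDER_EXPR, and OBJ a reference to a
   particular object of that type:

       tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   Each PLACEHOLDER_EXPR is replaced by the part of OBJ whose type matches
   it (possibly through an INDIRECT_REF), so the resulting size expression
   can be evaluated for that particular object.  */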
3902 \f
3903
3904 /* Subroutine of stabilize_reference; this is called for subtrees of
3905 references. Any expression with side-effects must be put in a SAVE_EXPR
3906 to ensure that it is only evaluated once.
3907
3908 We don't put SAVE_EXPR nodes around everything, because assigning very
3909 simple expressions to temporaries causes us to miss good opportunities
3910 for optimizations. Among other things, the opportunity to fold in the
3911 addition of a constant into an addressing mode often gets lost, e.g.
3912 "y[i+1] += x;". In general, we take the approach that we should not make
3913 an assignment unless we are forced into it - i.e., that any non-side effect
3914 operator should be allowed, and that cse should take care of coalescing
3915 multiple utterances of the same expression should that prove fruitful. */
3916
3917 static tree
3918 stabilize_reference_1 (tree e)
3919 {
3920 tree result;
3921 enum tree_code code = TREE_CODE (e);
3922
3923 /* We cannot ignore const expressions because the expression might be a
3924 reference to a const array whose index contains side-effects. But we
3925 can ignore things that are actually constant or that have already been
3926 handled by this function. */
3927
3928 if (tree_invariant_p (e))
3929 return e;
3930
3931 switch (TREE_CODE_CLASS (code))
3932 {
3933 case tcc_exceptional:
3934 case tcc_type:
3935 case tcc_declaration:
3936 case tcc_comparison:
3937 case tcc_statement:
3938 case tcc_expression:
3939 case tcc_reference:
3940 case tcc_vl_exp:
3941 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3942 so that it will only be evaluated once. */
3943 /* The reference (r) and comparison (<) classes could be handled as
3944 below, but it is generally faster to only evaluate them once. */
3945 if (TREE_SIDE_EFFECTS (e))
3946 return save_expr (e);
3947 return e;
3948
3949 case tcc_constant:
3950 /* Constants need no processing. In fact, we should never reach
3951 here. */
3952 return e;
3953
3954 case tcc_binary:
3955 /* Division is slow and tends to be compiled with jumps,
3956 especially the division by powers of 2 that is often
3957 found inside of an array reference. So do it just once. */
3958 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3959 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3960 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3961 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3962 return save_expr (e);
3963 /* Recursively stabilize each operand. */
3964 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3965 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3966 break;
3967
3968 case tcc_unary:
3969 /* Recursively stabilize each operand. */
3970 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3971 break;
3972
3973 default:
3974 gcc_unreachable ();
3975 }
3976
3977 TREE_TYPE (result) = TREE_TYPE (e);
3978 TREE_READONLY (result) = TREE_READONLY (e);
3979 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3980 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3981
3982 return result;
3983 }
3984
3985 /* Stabilize a reference so that we can use it any number of times
3986 without causing its operands to be evaluated more than once.
3987 Returns the stabilized reference. This works by means of save_expr,
3988 so see the caveats in the comments about save_expr.
3989
3990 Also allows conversion expressions whose operands are references.
3991 Any other kind of expression is returned unchanged. */
3992
3993 tree
3994 stabilize_reference (tree ref)
3995 {
3996 tree result;
3997 enum tree_code code = TREE_CODE (ref);
3998
3999 switch (code)
4000 {
4001 case VAR_DECL:
4002 case PARM_DECL:
4003 case RESULT_DECL:
4004 /* No action is needed in this case. */
4005 return ref;
4006
4007 CASE_CONVERT:
4008 case FLOAT_EXPR:
4009 case FIX_TRUNC_EXPR:
4010 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4011 break;
4012
4013 case INDIRECT_REF:
4014 result = build_nt (INDIRECT_REF,
4015 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4016 break;
4017
4018 case COMPONENT_REF:
4019 result = build_nt (COMPONENT_REF,
4020 stabilize_reference (TREE_OPERAND (ref, 0)),
4021 TREE_OPERAND (ref, 1), NULL_TREE);
4022 break;
4023
4024 case BIT_FIELD_REF:
4025 result = build_nt (BIT_FIELD_REF,
4026 stabilize_reference (TREE_OPERAND (ref, 0)),
4027 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4028 break;
4029
4030 case ARRAY_REF:
4031 result = build_nt (ARRAY_REF,
4032 stabilize_reference (TREE_OPERAND (ref, 0)),
4033 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4034 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4035 break;
4036
4037 case ARRAY_RANGE_REF:
4038 result = build_nt (ARRAY_RANGE_REF,
4039 stabilize_reference (TREE_OPERAND (ref, 0)),
4040 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4041 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4042 break;
4043
4044 case COMPOUND_EXPR:
4045 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4046 it wouldn't be ignored. This matters when dealing with
4047 volatiles. */
4048 return stabilize_reference_1 (ref);
4049
4050 /* If arg isn't a kind of lvalue we recognize, make no change.
4051 Caller should recognize the error for an invalid lvalue. */
4052 default:
4053 return ref;
4054
4055 case ERROR_MARK:
4056 return error_mark_node;
4057 }
4058
4059 TREE_TYPE (result) = TREE_TYPE (ref);
4060 TREE_READONLY (result) = TREE_READONLY (ref);
4061 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4062 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4063
4064 return result;
4065 }
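
/* Editorial usage sketch (not part of GCC, guarded out): a front end
   lowering a compound assignment "lhs += rhs" would typically stabilize
   LHS once so that side-effecting subexpressions (e.g. a[i++]) are
   evaluated only a single time even though LHS appears both as the store
   target and as an operand.  The helper name below is hypothetical; the
   calls are to the functions defined above.  */
#if 0
static tree
sketch_lower_plus_assign (tree lhs, tree rhs)
{
  /* Wrap side-effecting pieces of LHS in SAVE_EXPRs exactly once.  */
  tree stable_lhs = stabilize_reference (lhs);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable_lhs), stable_lhs, rhs);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable_lhs), stable_lhs, sum);
}
#endif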
4066 \f
4067 /* Low-level constructors for expressions. */
4068
4069 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4070 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4071
4072 void
4073 recompute_tree_invariant_for_addr_expr (tree t)
4074 {
4075 tree node;
4076 bool tc = true, se = false;
4077
4078 /* We started out assuming this address is both invariant and constant, but
4079 does not have side effects. Now go down any handled components and see if
4080 any of them involve offsets that are either non-constant or non-invariant.
4081 Also check for side-effects.
4082
4083 ??? Note that this code makes no attempt to deal with the case where
4084 taking the address of something causes a copy due to misalignment. */
4085
4086 #define UPDATE_FLAGS(NODE) \
4087 do { tree _node = (NODE); \
4088 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4089 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4090
4091 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4092 node = TREE_OPERAND (node, 0))
4093 {
4094 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4095 array reference (probably made temporarily by the G++ front end),
4096 so ignore all the operands. */
4097 if ((TREE_CODE (node) == ARRAY_REF
4098 || TREE_CODE (node) == ARRAY_RANGE_REF)
4099 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4100 {
4101 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4102 if (TREE_OPERAND (node, 2))
4103 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4104 if (TREE_OPERAND (node, 3))
4105 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4106 }
4107 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4108 FIELD_DECL, apparently. The G++ front end can put something else
4109 there, at least temporarily. */
4110 else if (TREE_CODE (node) == COMPONENT_REF
4111 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4112 {
4113 if (TREE_OPERAND (node, 2))
4114 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4115 }
4116 }
4117
4118 node = lang_hooks.expr_to_decl (node, &tc, &se);
4119
4120 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4121 the address, since &(*a)->b is a form of addition. If it's a constant, the
4122 address is constant too. If it's a decl, its address is constant if the
4123 decl is static. Everything else is not constant and, furthermore,
4124 taking the address of a volatile variable is not volatile. */
4125 if (TREE_CODE (node) == INDIRECT_REF
4126 || TREE_CODE (node) == MEM_REF)
4127 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4128 else if (CONSTANT_CLASS_P (node))
4129 ;
4130 else if (DECL_P (node))
4131 tc &= (staticp (node) != NULL_TREE);
4132 else
4133 {
4134 tc = false;
4135 se |= TREE_SIDE_EFFECTS (node);
4136 }
4137
4138
4139 TREE_CONSTANT (t) = tc;
4140 TREE_SIDE_EFFECTS (t) = se;
4141 #undef UPDATE_FLAGS
4142 }
4143
4144 /* Build an expression of code CODE, data type TYPE, and operands as
4145 specified. Expressions and reference nodes can be created this way.
4146 Constants, decls, types and misc nodes cannot be.
4147
4148 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4149 enough for all extant tree codes. */
4150
4151 tree
4152 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4153 {
4154 tree t;
4155
4156 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4157
4158 t = make_node_stat (code PASS_MEM_STAT);
4159 TREE_TYPE (t) = tt;
4160
4161 return t;
4162 }
4163
4164 tree
4165 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4166 {
4167 int length = sizeof (struct tree_exp);
4168 tree t;
4169
4170 record_node_allocation_statistics (code, length);
4171
4172 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4173
4174 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4175
4176 memset (t, 0, sizeof (struct tree_common));
4177
4178 TREE_SET_CODE (t, code);
4179
4180 TREE_TYPE (t) = type;
4181 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4182 TREE_OPERAND (t, 0) = node;
4183 if (node && !TYPE_P (node))
4184 {
4185 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4186 TREE_READONLY (t) = TREE_READONLY (node);
4187 }
4188
4189 if (TREE_CODE_CLASS (code) == tcc_statement)
4190 TREE_SIDE_EFFECTS (t) = 1;
4191 else switch (code)
4192 {
4193 case VA_ARG_EXPR:
4194 /* All of these have side-effects, no matter what their
4195 operands are. */
4196 TREE_SIDE_EFFECTS (t) = 1;
4197 TREE_READONLY (t) = 0;
4198 break;
4199
4200 case INDIRECT_REF:
4201 /* Whether a dereference is readonly has nothing to do with whether
4202 its operand is readonly. */
4203 TREE_READONLY (t) = 0;
4204 break;
4205
4206 case ADDR_EXPR:
4207 if (node)
4208 recompute_tree_invariant_for_addr_expr (t);
4209 break;
4210
4211 default:
4212 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4213 && node && !TYPE_P (node)
4214 && TREE_CONSTANT (node))
4215 TREE_CONSTANT (t) = 1;
4216 if (TREE_CODE_CLASS (code) == tcc_reference
4217 && node && TREE_THIS_VOLATILE (node))
4218 TREE_THIS_VOLATILE (t) = 1;
4219 break;
4220 }
4221
4222 return t;
4223 }
4224
4225 #define PROCESS_ARG(N) \
4226 do { \
4227 TREE_OPERAND (t, N) = arg##N; \
4228 if (arg##N && !TYPE_P (arg##N)) \
4229 { \
4230 if (TREE_SIDE_EFFECTS (arg##N)) \
4231 side_effects = 1; \
4232 if (!TREE_READONLY (arg##N) \
4233 && !CONSTANT_CLASS_P (arg##N)) \
4234 (void) (read_only = 0); \
4235 if (!TREE_CONSTANT (arg##N)) \
4236 (void) (constant = 0); \
4237 } \
4238 } while (0)
4239
4240 tree
4241 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4242 {
4243 bool constant, read_only, side_effects;
4244 tree t;
4245
4246 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4247
4248 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4249 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4250 /* When sizetype precision doesn't match that of pointers
4251 we need to be able to build explicit extensions or truncations
4252 of the offset argument. */
4253 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4254 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4255 && TREE_CODE (arg1) == INTEGER_CST);
4256
4257 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4258 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4259 && ptrofftype_p (TREE_TYPE (arg1)));
4260
4261 t = make_node_stat (code PASS_MEM_STAT);
4262 TREE_TYPE (t) = tt;
4263
4264 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4265 result based on those same flags for the arguments. But if the
4266 arguments aren't really even `tree' expressions, we shouldn't be trying
4267 to do this. */
4268
4269 /* Expressions without side effects may be constant if their
4270 arguments are as well. */
4271 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4272 || TREE_CODE_CLASS (code) == tcc_binary);
4273 read_only = 1;
4274 side_effects = TREE_SIDE_EFFECTS (t);
4275
4276 PROCESS_ARG (0);
4277 PROCESS_ARG (1);
4278
4279 TREE_READONLY (t) = read_only;
4280 TREE_CONSTANT (t) = constant;
4281 TREE_SIDE_EFFECTS (t) = side_effects;
4282 TREE_THIS_VOLATILE (t)
4283 = (TREE_CODE_CLASS (code) == tcc_reference
4284 && arg0 && TREE_THIS_VOLATILE (arg0));
4285
4286 return t;
4287 }
4288
4289
4290 tree
4291 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4292 tree arg2 MEM_STAT_DECL)
4293 {
4294 bool constant, read_only, side_effects;
4295 tree t;
4296
4297 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4298 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4299
4300 t = make_node_stat (code PASS_MEM_STAT);
4301 TREE_TYPE (t) = tt;
4302
4303 read_only = 1;
4304
4305 /* As a special exception, if COND_EXPR has NULL branches, we
4306 assume that it is a gimple statement and always consider
4307 it to have side effects. */
4308 if (code == COND_EXPR
4309 && tt == void_type_node
4310 && arg1 == NULL_TREE
4311 && arg2 == NULL_TREE)
4312 side_effects = true;
4313 else
4314 side_effects = TREE_SIDE_EFFECTS (t);
4315
4316 PROCESS_ARG (0);
4317 PROCESS_ARG (1);
4318 PROCESS_ARG (2);
4319
4320 if (code == COND_EXPR)
4321 TREE_READONLY (t) = read_only;
4322
4323 TREE_SIDE_EFFECTS (t) = side_effects;
4324 TREE_THIS_VOLATILE (t)
4325 = (TREE_CODE_CLASS (code) == tcc_reference
4326 && arg0 && TREE_THIS_VOLATILE (arg0));
4327
4328 return t;
4329 }
4330
4331 tree
4332 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4333 tree arg2, tree arg3 MEM_STAT_DECL)
4334 {
4335 bool constant, read_only, side_effects;
4336 tree t;
4337
4338 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4339
4340 t = make_node_stat (code PASS_MEM_STAT);
4341 TREE_TYPE (t) = tt;
4342
4343 side_effects = TREE_SIDE_EFFECTS (t);
4344
4345 PROCESS_ARG (0);
4346 PROCESS_ARG (1);
4347 PROCESS_ARG (2);
4348 PROCESS_ARG (3);
4349
4350 TREE_SIDE_EFFECTS (t) = side_effects;
4351 TREE_THIS_VOLATILE (t)
4352 = (TREE_CODE_CLASS (code) == tcc_reference
4353 && arg0 && TREE_THIS_VOLATILE (arg0));
4354
4355 return t;
4356 }
4357
4358 tree
4359 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4360 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4361 {
4362 bool constant, read_only, side_effects;
4363 tree t;
4364
4365 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4366
4367 t = make_node_stat (code PASS_MEM_STAT);
4368 TREE_TYPE (t) = tt;
4369
4370 side_effects = TREE_SIDE_EFFECTS (t);
4371
4372 PROCESS_ARG (0);
4373 PROCESS_ARG (1);
4374 PROCESS_ARG (2);
4375 PROCESS_ARG (3);
4376 PROCESS_ARG (4);
4377
4378 TREE_SIDE_EFFECTS (t) = side_effects;
4379 TREE_THIS_VOLATILE (t)
4380 = (TREE_CODE_CLASS (code) == tcc_reference
4381 && arg0 && TREE_THIS_VOLATILE (arg0));
4382
4383 return t;
4384 }
4385
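/* Editorial usage sketch (not part of GCC, guarded out): constructing
   (a + 1) * b with the fixed-arity builders above.  TREE_CONSTANT,
   TREE_READONLY and TREE_SIDE_EFFECTS on each result are derived from the
   operands via PROCESS_ARG, so no manual flag setting is needed.  The
   helper name is hypothetical.  */
#if 0
static tree
sketch_build_a_plus_1_times_b (tree a, tree b)
{
  tree type = TREE_TYPE (a);
  tree sum = build2 (PLUS_EXPR, type, a, build_int_cst (type, 1));
  return build2 (MULT_EXPR, type, sum, b);
}
#endif
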
4386 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4387 on the pointer PTR. */
4388
4389 tree
4390 build_simple_mem_ref_loc (location_t loc, tree ptr)
4391 {
4392 HOST_WIDE_INT offset = 0;
4393 tree ptype = TREE_TYPE (ptr);
4394 tree tem;
4395 /* For convenience allow addresses that collapse to a simple base
4396 and offset. */
4397 if (TREE_CODE (ptr) == ADDR_EXPR
4398 && (handled_component_p (TREE_OPERAND (ptr, 0))
4399 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4400 {
4401 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4402 gcc_assert (ptr);
4403 ptr = build_fold_addr_expr (ptr);
4404 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4405 }
4406 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4407 ptr, build_int_cst (ptype, offset));
4408 SET_EXPR_LOCATION (tem, loc);
4409 return tem;
4410 }
4411
4412 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4413
4414 offset_int
4415 mem_ref_offset (const_tree t)
4416 {
4417 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4418 }
4419
4420 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4421 offsetted by OFFSET units. */
4422
4423 tree
4424 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4425 {
4426 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4427 build_fold_addr_expr (base),
4428 build_int_cst (ptr_type_node, offset));
4429 tree addr = build1 (ADDR_EXPR, type, ref);
4430 recompute_tree_invariant_for_addr_expr (addr);
4431 return addr;
4432 }
4433
4434 /* Similar to the buildN functions above, except don't specify the TREE_TYPE
4435 and leave the TREE_SIDE_EFFECTS as 0.
4436 It is permissible for arguments to be null,
4437 or even garbage if their values do not matter. */
4438
4439 tree
4440 build_nt (enum tree_code code, ...)
4441 {
4442 tree t;
4443 int length;
4444 int i;
4445 va_list p;
4446
4447 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4448
4449 va_start (p, code);
4450
4451 t = make_node (code);
4452 length = TREE_CODE_LENGTH (code);
4453
4454 for (i = 0; i < length; i++)
4455 TREE_OPERAND (t, i) = va_arg (p, tree);
4456
4457 va_end (p);
4458 return t;
4459 }
4460
4461 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4462 tree vec. */
4463
4464 tree
4465 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4466 {
4467 tree ret, t;
4468 unsigned int ix;
4469
4470 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4471 CALL_EXPR_FN (ret) = fn;
4472 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4473 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4474 CALL_EXPR_ARG (ret, ix) = t;
4475 return ret;
4476 }
4477 \f
4478 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4479 We do NOT enter this node in any sort of symbol table.
4480
4481 LOC is the location of the decl.
4482
4483 layout_decl is used to set up the decl's storage layout.
4484 Other slots are initialized to 0 or null pointers. */
4485
4486 tree
4487 build_decl_stat (location_t loc, enum tree_code code, tree name,
4488 tree type MEM_STAT_DECL)
4489 {
4490 tree t;
4491
4492 t = make_node_stat (code PASS_MEM_STAT);
4493 DECL_SOURCE_LOCATION (t) = loc;
4494
4495 /* if (type == error_mark_node)
4496 type = integer_type_node; */
4497 /* That is not done, deliberately, so that having error_mark_node
4498 as the type can suppress useless errors in the use of this variable. */
4499
4500 DECL_NAME (t) = name;
4501 TREE_TYPE (t) = type;
4502
4503 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4504 layout_decl (t, 0);
4505
4506 return t;
4507 }
4508
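/* Editorial usage sketch (not part of GCC, guarded out): creating an
   artificial integer variable with build_decl (the macro wrapper around
   build_decl_stat).  layout_decl runs automatically because the code is
   VAR_DECL.  The variable name is purely illustrative.  */
#if 0
static tree
sketch_make_artificial_int_var (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                         get_identifier ("__sketch_tmp"), integer_type_node);
  DECL_ARTIFICIAL (var) = 1;
  return var;
}
#endif
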
4509 /* Builds and returns a function declaration with NAME and TYPE. */
4510
4511 tree
4512 build_fn_decl (const char *name, tree type)
4513 {
4514 tree id = get_identifier (name);
4515 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4516
4517 DECL_EXTERNAL (decl) = 1;
4518 TREE_PUBLIC (decl) = 1;
4519 DECL_ARTIFICIAL (decl) = 1;
4520 TREE_NOTHROW (decl) = 1;
4521
4522 return decl;
4523 }
4524
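/* Editorial usage sketch (not part of GCC, guarded out): declaring an
   external, nothrow helper with signature "int (int)" via build_fn_decl.
   The helper's name is hypothetical; build_function_type_list is the
   existing convenience for assembling the FUNCTION_TYPE.  */
#if 0
static tree
sketch_declare_external_helper (void)
{
  tree fntype = build_function_type_list (integer_type_node,
                                          integer_type_node, NULL_TREE);
  return build_fn_decl ("__sketch_helper", fntype);
}
#endif
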
4525 vec<tree, va_gc> *all_translation_units;
4526
4527 /* Builds a new translation-unit decl with name NAME, queues it in the
4528 global list of translation-unit decls and returns it. */
4529
4530 tree
4531 build_translation_unit_decl (tree name)
4532 {
4533 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4534 name, NULL_TREE);
4535 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4536 vec_safe_push (all_translation_units, tu);
4537 return tu;
4538 }
4539
4540 \f
4541 /* BLOCK nodes are used to represent the structure of binding contours
4542 and declarations, once those contours have been exited and their contents
4543 compiled. This information is used for outputting debugging info. */
4544
4545 tree
4546 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4547 {
4548 tree block = make_node (BLOCK);
4549
4550 BLOCK_VARS (block) = vars;
4551 BLOCK_SUBBLOCKS (block) = subblocks;
4552 BLOCK_SUPERCONTEXT (block) = supercontext;
4553 BLOCK_CHAIN (block) = chain;
4554 return block;
4555 }
4556
4557 \f
4558 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4559
4560 LOC is the location to use in tree T. */
4561
4562 void
4563 protected_set_expr_location (tree t, location_t loc)
4564 {
4565 if (t && CAN_HAVE_LOCATION_P (t))
4566 SET_EXPR_LOCATION (t, loc);
4567 }
4568 \f
4569 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4570 is ATTRIBUTE. */
4571
4572 tree
4573 build_decl_attribute_variant (tree ddecl, tree attribute)
4574 {
4575 DECL_ATTRIBUTES (ddecl) = attribute;
4576 return ddecl;
4577 }
4578
4579 /* Borrowed from hashtab.c iterative_hash implementation. */
4580 #define mix(a,b,c) \
4581 { \
4582 a -= b; a -= c; a ^= (c>>13); \
4583 b -= c; b -= a; b ^= (a<< 8); \
4584 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4585 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4586 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4587 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4588 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4589 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4590 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4591 }
4592
4593
4594 /* Produce a good hash value combining VAL and VAL2. */
4595 hashval_t
4596 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4597 {
4598 /* The golden ratio; an arbitrary value. */
4599 hashval_t a = 0x9e3779b9;
4600
4601 mix (a, val, val2);
4602 return val2;
4603 }
4604
4605 /* Produce a good hash value combining VAL and VAL2. */
4606 hashval_t
4607 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4608 {
4609 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4610 return iterative_hash_hashval_t (val, val2);
4611 else
4612 {
4613 hashval_t a = (hashval_t) val;
4614 /* Avoid warnings about shifting of more than the width of the type on
4615 hosts that won't execute this path. */
4616 int zero = 0;
4617 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4618 mix (a, b, val2);
4619 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4620 {
4621 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4622 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4623 mix (a, b, val2);
4624 }
4625 return val2;
4626 }
4627 }
4628
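/* Editorial usage sketch (not part of GCC, guarded out): chaining the
   iterative hash helpers above to fold two fields into a single hash
   value, seeding the chain with zero.  The helper name is hypothetical.  */
#if 0
static hashval_t
sketch_hash_code_and_size (enum tree_code code, HOST_WIDE_INT size)
{
  hashval_t h = iterative_hash_hashval_t ((hashval_t) code, 0);
  return iterative_hash_host_wide_int (size, h);
}
#endif
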
4629 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4630 is ATTRIBUTE and its qualifiers are QUALS.
4631
4632 Record such modified types already made so we don't make duplicates. */
4633
4634 tree
4635 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4636 {
4637 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4638 {
4639 hashval_t hashcode = 0;
4640 tree ntype;
4641 int i;
4642 tree t;
4643 enum tree_code code = TREE_CODE (ttype);
4644
4645 /* Building a distinct copy of a tagged type is inappropriate; it
4646 causes breakage in code that expects there to be a one-to-one
4647 relationship between a struct and its fields.
4648 build_duplicate_type is another solution (as used in
4649 handle_transparent_union_attribute), but that doesn't play well
4650 with the stronger C++ type identity model. */
4651 if (TREE_CODE (ttype) == RECORD_TYPE
4652 || TREE_CODE (ttype) == UNION_TYPE
4653 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4654 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4655 {
4656 warning (OPT_Wattributes,
4657 "ignoring attributes applied to %qT after definition",
4658 TYPE_MAIN_VARIANT (ttype));
4659 return build_qualified_type (ttype, quals);
4660 }
4661
4662 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4663 ntype = build_distinct_type_copy (ttype);
4664
4665 TYPE_ATTRIBUTES (ntype) = attribute;
4666
4667 hashcode = iterative_hash_object (code, hashcode);
4668 if (TREE_TYPE (ntype))
4669 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4670 hashcode);
4671 hashcode = attribute_hash_list (attribute, hashcode);
4672
4673 switch (TREE_CODE (ntype))
4674 {
4675 case FUNCTION_TYPE:
4676 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4677 break;
4678 case ARRAY_TYPE:
4679 if (TYPE_DOMAIN (ntype))
4680 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4681 hashcode);
4682 break;
4683 case INTEGER_TYPE:
4684 t = TYPE_MAX_VALUE (ntype);
4685 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4686 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4687 break;
4688 case REAL_TYPE:
4689 case FIXED_POINT_TYPE:
4690 {
4691 unsigned int precision = TYPE_PRECISION (ntype);
4692 hashcode = iterative_hash_object (precision, hashcode);
4693 }
4694 break;
4695 default:
4696 break;
4697 }
4698
4699 ntype = type_hash_canon (hashcode, ntype);
4700
4701 /* If the target-dependent attributes make NTYPE different from
4702 its canonical type, we will need to use structural equality
4703 checks for this type. */
4704 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4705 || !comp_type_attributes (ntype, ttype))
4706 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4707 else if (TYPE_CANONICAL (ntype) == ntype)
4708 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4709
4710 ttype = build_qualified_type (ntype, quals);
4711 }
4712 else if (TYPE_QUALS (ttype) != quals)
4713 ttype = build_qualified_type (ttype, quals);
4714
4715 return ttype;
4716 }
4717
4718 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4719 the same. */
4720
4721 static bool
4722 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4723 {
4724 tree cl1, cl2;
4725 for (cl1 = clauses1, cl2 = clauses2;
4726 cl1 && cl2;
4727 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4728 {
4729 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4730 return false;
4731 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4732 {
4733 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4734 OMP_CLAUSE_DECL (cl2)) != 1)
4735 return false;
4736 }
4737 switch (OMP_CLAUSE_CODE (cl1))
4738 {
4739 case OMP_CLAUSE_ALIGNED:
4740 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4741 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4742 return false;
4743 break;
4744 case OMP_CLAUSE_LINEAR:
4745 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4746 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4747 return false;
4748 break;
4749 case OMP_CLAUSE_SIMDLEN:
4750 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4751 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4752 return false;
4753 default:
4754 break;
4755 }
4756 }
4757 return true;
4758 }
4759
4760 /* Compare two constructor-element-type constants. Return true if the
4761 lists are known to be equal; otherwise return false. */
4762
4763 static bool
4764 simple_cst_list_equal (const_tree l1, const_tree l2)
4765 {
4766 while (l1 != NULL_TREE && l2 != NULL_TREE)
4767 {
4768 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4769 return false;
4770
4771 l1 = TREE_CHAIN (l1);
4772 l2 = TREE_CHAIN (l2);
4773 }
4774
4775 return l1 == l2;
4776 }
4777
4778 /* Compare two attributes for their value identity. Return true if the
4779 attribute values are known to be equal; otherwise return false.
4780 */
4781
4782 static bool
4783 attribute_value_equal (const_tree attr1, const_tree attr2)
4784 {
4785 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4786 return true;
4787
4788 if (TREE_VALUE (attr1) != NULL_TREE
4789 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4790 && TREE_VALUE (attr2) != NULL
4791 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4792 return (simple_cst_list_equal (TREE_VALUE (attr1),
4793 TREE_VALUE (attr2)) == 1);
4794
4795 if ((flag_openmp || flag_openmp_simd)
4796 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4797 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4798 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4799 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4800 TREE_VALUE (attr2));
4801
4802 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4803 }
4804
4805 /* Return 0 if the attributes for two types are incompatible, 1 if they
4806 are compatible, and 2 if they are nearly compatible (which causes a
4807 warning to be generated). */
4808 int
4809 comp_type_attributes (const_tree type1, const_tree type2)
4810 {
4811 const_tree a1 = TYPE_ATTRIBUTES (type1);
4812 const_tree a2 = TYPE_ATTRIBUTES (type2);
4813 const_tree a;
4814
4815 if (a1 == a2)
4816 return 1;
4817 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4818 {
4819 const struct attribute_spec *as;
4820 const_tree attr;
4821
4822 as = lookup_attribute_spec (get_attribute_name (a));
4823 if (!as || as->affects_type_identity == false)
4824 continue;
4825
4826 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4827 if (!attr || !attribute_value_equal (a, attr))
4828 break;
4829 }
4830 if (!a)
4831 {
4832 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4833 {
4834 const struct attribute_spec *as;
4835
4836 as = lookup_attribute_spec (get_attribute_name (a));
4837 if (!as || as->affects_type_identity == false)
4838 continue;
4839
4840 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4841 break;
4842 /* We don't need to compare trees again, as we did this
4843 already in the first loop. */
4844 }
4845 /* All attributes that affect type identity are equal, so
4846 there is no need to call the target hook for comparison. */
4847 if (!a)
4848 return 1;
4849 }
4850 /* As some type combinations - like the default calling convention - might
4851 be compatible, we have to call the target hook to get the final result. */
4852 return targetm.comp_type_attributes (type1, type2);
4853 }
4854
4855 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4856 is ATTRIBUTE.
4857
4858 Record such modified types already made so we don't make duplicates. */
4859
4860 tree
4861 build_type_attribute_variant (tree ttype, tree attribute)
4862 {
4863 return build_type_attribute_qual_variant (ttype, attribute,
4864 TYPE_QUALS (ttype));
4865 }
4866
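/* Editorial usage sketch (not part of GCC, guarded out): prepending one
   attribute to a type's existing attribute list and asking for the
   corresponding variant.  Attribute lists are TREE_LISTs whose
   TREE_PURPOSE is the attribute name identifier and whose TREE_VALUE is
   the argument list.  The helper name is hypothetical.  */
#if 0
static tree
sketch_add_type_attribute (tree type, tree name, tree args)
{
  tree attrs = tree_cons (name, args, TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attrs);
}
#endif
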
4867
4868 /* Reset the expression *EXPR_P, a size or position.
4869
4870 ??? We could reset all non-constant sizes or positions. But it's cheap
4871 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4872
4873 We need to reset self-referential sizes or positions because they cannot
4874 be gimplified and thus can contain a CALL_EXPR after the gimplification
4875 is finished, which will run afoul of LTO streaming. And they need to be
4876 reset to something essentially dummy but not constant, so as to preserve
4877 the properties of the object they are attached to. */
4878
4879 static inline void
4880 free_lang_data_in_one_sizepos (tree *expr_p)
4881 {
4882 tree expr = *expr_p;
4883 if (CONTAINS_PLACEHOLDER_P (expr))
4884 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4885 }
4886
4887
4888 /* Reset all the fields in a binfo node BINFO. We only keep
4889 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4890
4891 static void
4892 free_lang_data_in_binfo (tree binfo)
4893 {
4894 unsigned i;
4895 tree t;
4896
4897 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4898
4899 BINFO_VIRTUALS (binfo) = NULL_TREE;
4900 BINFO_BASE_ACCESSES (binfo) = NULL;
4901 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4902 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4903
4904 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4905 free_lang_data_in_binfo (t);
4906 }
4907
4908
4909 /* Reset all language specific information still present in TYPE. */
4910
4911 static void
4912 free_lang_data_in_type (tree type)
4913 {
4914 gcc_assert (TYPE_P (type));
4915
4916 /* Give the FE a chance to remove its own data first. */
4917 lang_hooks.free_lang_data (type);
4918
4919 TREE_LANG_FLAG_0 (type) = 0;
4920 TREE_LANG_FLAG_1 (type) = 0;
4921 TREE_LANG_FLAG_2 (type) = 0;
4922 TREE_LANG_FLAG_3 (type) = 0;
4923 TREE_LANG_FLAG_4 (type) = 0;
4924 TREE_LANG_FLAG_5 (type) = 0;
4925 TREE_LANG_FLAG_6 (type) = 0;
4926
4927 if (TREE_CODE (type) == FUNCTION_TYPE)
4928 {
4929 /* Remove the const and volatile qualifiers from arguments. The
4930 C++ front end removes them, but the C front end does not,
4931 leading to false ODR violation errors when merging two
4932 instances of the same function signature compiled by
4933 different front ends. */
4934 tree p;
4935
4936 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4937 {
4938 tree arg_type = TREE_VALUE (p);
4939
4940 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4941 {
4942 int quals = TYPE_QUALS (arg_type)
4943 & ~TYPE_QUAL_CONST
4944 & ~TYPE_QUAL_VOLATILE;
4945 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4946 free_lang_data_in_type (TREE_VALUE (p));
4947 }
4948 }
4949 }
4950
4951 /* Remove members that are not actually FIELD_DECLs from the field
4952 list of an aggregate. These occur in C++. */
4953 if (RECORD_OR_UNION_TYPE_P (type))
4954 {
4955 tree prev, member;
4956
4957 /* Note that TYPE_FIELDS can be shared across distinct
4958 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4959 to be removed, we cannot set its TREE_CHAIN to NULL.
4960 Otherwise, we would not be able to find all the other fields
4961 in the other instances of this TREE_TYPE.
4962
4963 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4964 prev = NULL_TREE;
4965 member = TYPE_FIELDS (type);
4966 while (member)
4967 {
4968 if (TREE_CODE (member) == FIELD_DECL
4969 || TREE_CODE (member) == TYPE_DECL)
4970 {
4971 if (prev)
4972 TREE_CHAIN (prev) = member;
4973 else
4974 TYPE_FIELDS (type) = member;
4975 prev = member;
4976 }
4977
4978 member = TREE_CHAIN (member);
4979 }
4980
4981 if (prev)
4982 TREE_CHAIN (prev) = NULL_TREE;
4983 else
4984 TYPE_FIELDS (type) = NULL_TREE;
4985
4986 TYPE_METHODS (type) = NULL_TREE;
4987 if (TYPE_BINFO (type))
4988 free_lang_data_in_binfo (TYPE_BINFO (type));
4989 }
4990 else
4991 {
4992 /* For non-aggregate types, clear out the language slot (which
4993 overloads TYPE_BINFO). */
4994 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4995
4996 if (INTEGRAL_TYPE_P (type)
4997 || SCALAR_FLOAT_TYPE_P (type)
4998 || FIXED_POINT_TYPE_P (type))
4999 {
5000 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5001 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5002 }
5003 }
5004
5005 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5006 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5007
5008 if (TYPE_CONTEXT (type)
5009 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5010 {
5011 tree ctx = TYPE_CONTEXT (type);
5012 do
5013 {
5014 ctx = BLOCK_SUPERCONTEXT (ctx);
5015 }
5016 while (ctx && TREE_CODE (ctx) == BLOCK);
5017 TYPE_CONTEXT (type) = ctx;
5018 }
5019 }
5020
5021
5022 /* Return true if DECL may need an assembler name to be set. */
5023
5024 static inline bool
5025 need_assembler_name_p (tree decl)
5026 {
5027 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5028 if (TREE_CODE (decl) != FUNCTION_DECL
5029 && TREE_CODE (decl) != VAR_DECL)
5030 return false;
5031
5032 /* If DECL already has its assembler name set, it does not need a
5033 new one. */
5034 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5035 || DECL_ASSEMBLER_NAME_SET_P (decl))
5036 return false;
5037
5038 /* Abstract decls do not need an assembler name. */
5039 if (DECL_ABSTRACT (decl))
5040 return false;
5041
5042 /* For VAR_DECLs, only static, public and external symbols need an
5043 assembler name. */
5044 if (TREE_CODE (decl) == VAR_DECL
5045 && !TREE_STATIC (decl)
5046 && !TREE_PUBLIC (decl)
5047 && !DECL_EXTERNAL (decl))
5048 return false;
5049
5050 if (TREE_CODE (decl) == FUNCTION_DECL)
5051 {
5052 /* Do not set assembler name on builtins. Allow RTL expansion to
5053 decide whether to expand inline or via a regular call. */
5054 if (DECL_BUILT_IN (decl)
5055 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5056 return false;
5057
5058 /* Functions represented in the callgraph need an assembler name. */
5059 if (cgraph_get_node (decl) != NULL)
5060 return true;
5061
5062 /* Unused and not public functions don't need an assembler name. */
5063 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5064 return false;
5065 }
5066
5067 return true;
5068 }
5069
5070
5071 /* Reset all language specific information still present in symbol
5072 DECL. */
5073
5074 static void
5075 free_lang_data_in_decl (tree decl)
5076 {
5077 gcc_assert (DECL_P (decl));
5078
5079 /* Give the FE a chance to remove its own data first. */
5080 lang_hooks.free_lang_data (decl);
5081
5082 TREE_LANG_FLAG_0 (decl) = 0;
5083 TREE_LANG_FLAG_1 (decl) = 0;
5084 TREE_LANG_FLAG_2 (decl) = 0;
5085 TREE_LANG_FLAG_3 (decl) = 0;
5086 TREE_LANG_FLAG_4 (decl) = 0;
5087 TREE_LANG_FLAG_5 (decl) = 0;
5088 TREE_LANG_FLAG_6 (decl) = 0;
5089
5090 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5091 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5092 if (TREE_CODE (decl) == FIELD_DECL)
5093 {
5094 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5095 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5096 DECL_QUALIFIER (decl) = NULL_TREE;
5097 }
5098
5099 if (TREE_CODE (decl) == FUNCTION_DECL)
5100 {
5101 struct cgraph_node *node;
5102 if (!(node = cgraph_get_node (decl))
5103 || (!node->definition && !node->clones))
5104 {
5105 if (node)
5106 cgraph_release_function_body (node);
5107 else
5108 {
5109 release_function_body (decl);
5110 DECL_ARGUMENTS (decl) = NULL;
5111 DECL_RESULT (decl) = NULL;
5112 DECL_INITIAL (decl) = error_mark_node;
5113 }
5114 }
5115 if (gimple_has_body_p (decl))
5116 {
5117 tree t;
5118
5119 /* If DECL has a gimple body, then the context for its
5120 arguments must be DECL. Otherwise, it doesn't really
5121 matter, as we will not be emitting any code for DECL. In
5122 general, there may be other instances of DECL created by
5123 the front end and since PARM_DECLs are generally shared,
5124 their DECL_CONTEXT changes as the replicas of DECL are
5125 created. The only time where DECL_CONTEXT is important
5126 is for the FUNCTION_DECLs that have a gimple body (since
5127 the PARM_DECL will be used in the function's body). */
5128 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5129 DECL_CONTEXT (t) = decl;
5130 }
5131
5132 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5133 At this point, it is not needed anymore. */
5134 DECL_SAVED_TREE (decl) = NULL_TREE;
5135
5136 /* Clear the abstract origin if it refers to a method. Otherwise
5137 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5138 origin will not be output correctly. */
5139 if (DECL_ABSTRACT_ORIGIN (decl)
5140 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5141 && RECORD_OR_UNION_TYPE_P
5142 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5143 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5144
5145 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5146 DECL_VINDEX referring to itself into a vtable slot number as it
5147 should. Happens with functions that are copied and then forgotten
5148 about. Just clear it; it won't matter anymore. */
5149 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5150 DECL_VINDEX (decl) = NULL_TREE;
5151 }
5152 else if (TREE_CODE (decl) == VAR_DECL)
5153 {
5154 if ((DECL_EXTERNAL (decl)
5155 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5156 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5157 DECL_INITIAL (decl) = NULL_TREE;
5158 }
5159 else if (TREE_CODE (decl) == TYPE_DECL
5160 || TREE_CODE (decl) == FIELD_DECL)
5161 DECL_INITIAL (decl) = NULL_TREE;
5162 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5163 && DECL_INITIAL (decl)
5164 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5165 {
5166 /* Strip builtins from the translation-unit BLOCK. We still have targets
5167 without builtin_decl_explicit support; also, builtins are shared
5168 nodes, so we can't use TREE_CHAIN in multiple lists. */
5169 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5170 while (*nextp)
5171 {
5172 tree var = *nextp;
5173 if (TREE_CODE (var) == FUNCTION_DECL
5174 && DECL_BUILT_IN (var))
5175 *nextp = TREE_CHAIN (var);
5176 else
5177 nextp = &TREE_CHAIN (var);
5178 }
5179 }
5180 }
5181
5182
5183 /* Data used when collecting DECLs and TYPEs for language data removal. */
5184
5185 struct free_lang_data_d
5186 {
5187 /* Worklist to avoid excessive recursion. */
5188 vec<tree> worklist;
5189
5190 /* Set of traversed objects. Used to avoid duplicate visits. */
5191 struct pointer_set_t *pset;
5192
5193 /* Array of symbols to process with free_lang_data_in_decl. */
5194 vec<tree> decls;
5195
5196 /* Array of types to process with free_lang_data_in_type. */
5197 vec<tree> types;
5198 };
5199
5200
5201 /* Save all language fields needed to generate proper debug information
5202 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5203
5204 static void
5205 save_debug_info_for_decl (tree t)
5206 {
5207 /*struct saved_debug_info_d *sdi;*/
5208
5209 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5210
5211 /* FIXME. Partial implementation for saving debug info removed. */
5212 }
5213
5214
5215 /* Save all language fields needed to generate proper debug information
5216 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5217
5218 static void
5219 save_debug_info_for_type (tree t)
5220 {
5221 /*struct saved_debug_info_d *sdi;*/
5222
5223 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5224
5225 /* FIXME. Partial implementation for saving debug info removed. */
5226 }
5227
5228
5229 /* Add type or decl T to one of the list of tree nodes that need their
5230 language data removed. The lists are held inside FLD. */
5231
5232 static void
5233 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5234 {
5235 if (DECL_P (t))
5236 {
5237 fld->decls.safe_push (t);
5238 if (debug_info_level > DINFO_LEVEL_TERSE)
5239 save_debug_info_for_decl (t);
5240 }
5241 else if (TYPE_P (t))
5242 {
5243 fld->types.safe_push (t);
5244 if (debug_info_level > DINFO_LEVEL_TERSE)
5245 save_debug_info_for_type (t);
5246 }
5247 else
5248 gcc_unreachable ();
5249 }
5250
5251 /* Push tree node T into FLD->WORKLIST. */
5252
5253 static inline void
5254 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5255 {
5256 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5257 fld->worklist.safe_push ((t));
5258 }
5259
5260
5261 /* Operand callback helper for free_lang_data_in_node. *TP is the
5262 subtree operand being considered. */
5263
5264 static tree
5265 find_decls_types_r (tree *tp, int *ws, void *data)
5266 {
5267 tree t = *tp;
5268 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5269
5270 if (TREE_CODE (t) == TREE_LIST)
5271 return NULL_TREE;
5272
5273 /* Language specific nodes will be removed, so there is no need
5274 to gather anything under them. */
5275 if (is_lang_specific (t))
5276 {
5277 *ws = 0;
5278 return NULL_TREE;
5279 }
5280
5281 if (DECL_P (t))
5282 {
5283 /* Note that walk_tree does not traverse every possible field in
5284 decls, so we have to do our own traversals here. */
5285 add_tree_to_fld_list (t, fld);
5286
5287 fld_worklist_push (DECL_NAME (t), fld);
5288 fld_worklist_push (DECL_CONTEXT (t), fld);
5289 fld_worklist_push (DECL_SIZE (t), fld);
5290 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5291
5292 /* We are going to remove everything under DECL_INITIAL for
5293 TYPE_DECLs. No point walking them. */
5294 if (TREE_CODE (t) != TYPE_DECL)
5295 fld_worklist_push (DECL_INITIAL (t), fld);
5296
5297 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5298 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5299
5300 if (TREE_CODE (t) == FUNCTION_DECL)
5301 {
5302 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5303 fld_worklist_push (DECL_RESULT (t), fld);
5304 }
5305 else if (TREE_CODE (t) == TYPE_DECL)
5306 {
5307 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5308 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5309 }
5310 else if (TREE_CODE (t) == FIELD_DECL)
5311 {
5312 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5313 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5314 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5315 fld_worklist_push (DECL_FCONTEXT (t), fld);
5316 }
5317
5318 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5319 && DECL_HAS_VALUE_EXPR_P (t))
5320 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5321
5322 if (TREE_CODE (t) != FIELD_DECL
5323 && TREE_CODE (t) != TYPE_DECL)
5324 fld_worklist_push (TREE_CHAIN (t), fld);
5325 *ws = 0;
5326 }
5327 else if (TYPE_P (t))
5328 {
5329 /* Note that walk_tree does not traverse every possible field in
5330 types, so we have to do our own traversals here. */
5331 add_tree_to_fld_list (t, fld);
5332
5333 if (!RECORD_OR_UNION_TYPE_P (t))
5334 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5335 fld_worklist_push (TYPE_SIZE (t), fld);
5336 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5337 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5338 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5339 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5340 fld_worklist_push (TYPE_NAME (t), fld);
5341 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5342 them and thus do not want to reach unused pointer types
5343 this way. */
5344 if (!POINTER_TYPE_P (t))
5345 fld_worklist_push (TYPE_MINVAL (t), fld);
5346 if (!RECORD_OR_UNION_TYPE_P (t))
5347 fld_worklist_push (TYPE_MAXVAL (t), fld);
5348 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5349 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5350 do not want to reach unused variants this way. */
5351 if (TYPE_CONTEXT (t))
5352 {
5353 tree ctx = TYPE_CONTEXT (t);
5354 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5355 So push that instead. */
5356 while (ctx && TREE_CODE (ctx) == BLOCK)
5357 ctx = BLOCK_SUPERCONTEXT (ctx);
5358 fld_worklist_push (ctx, fld);
5359 }
5360 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5361 want to reach unused types this way. */
5362
5363 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5364 {
5365 unsigned i;
5366 tree tem;
5367 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5368 fld_worklist_push (TREE_TYPE (tem), fld);
5369 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5370 if (tem
5371 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5372 && TREE_CODE (tem) == TREE_LIST)
5373 do
5374 {
5375 fld_worklist_push (TREE_VALUE (tem), fld);
5376 tem = TREE_CHAIN (tem);
5377 }
5378 while (tem);
5379 }
5380 if (RECORD_OR_UNION_TYPE_P (t))
5381 {
5382 tree tem;
5383 /* Push all TYPE_FIELDS - there can be interleaving interesting
5384 and non-interesting things. */
5385 tem = TYPE_FIELDS (t);
5386 while (tem)
5387 {
5388 if (TREE_CODE (tem) == FIELD_DECL
5389 || TREE_CODE (tem) == TYPE_DECL)
5390 fld_worklist_push (tem, fld);
5391 tem = TREE_CHAIN (tem);
5392 }
5393 }
5394
5395 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5396 *ws = 0;
5397 }
5398 else if (TREE_CODE (t) == BLOCK)
5399 {
5400 tree tem;
5401 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5402 fld_worklist_push (tem, fld);
5403 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5404 fld_worklist_push (tem, fld);
5405 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5406 }
5407
5408 if (TREE_CODE (t) != IDENTIFIER_NODE
5409 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5410 fld_worklist_push (TREE_TYPE (t), fld);
5411
5412 return NULL_TREE;
5413 }
5414
5415
5416 /* Find decls and types in T. */
5417
5418 static void
5419 find_decls_types (tree t, struct free_lang_data_d *fld)
5420 {
5421 while (1)
5422 {
5423 if (!pointer_set_contains (fld->pset, t))
5424 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5425 if (fld->worklist.is_empty ())
5426 break;
5427 t = fld->worklist.pop ();
5428 }
5429 }
5430
5431 /* Translate all the types in LIST into the corresponding runtime
5432 types. */
5433
5434 static tree
5435 get_eh_types_for_runtime (tree list)
5436 {
5437 tree head, prev;
5438
5439 if (list == NULL_TREE)
5440 return NULL_TREE;
5441
5442 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5443 prev = head;
5444 list = TREE_CHAIN (list);
5445 while (list)
5446 {
5447 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5448 TREE_CHAIN (prev) = n;
5449 prev = TREE_CHAIN (prev);
5450 list = TREE_CHAIN (list);
5451 }
5452
5453 return head;
5454 }
5455
5456
5457 /* Find decls and types referenced in EH region R and store them in
5458 FLD->DECLS and FLD->TYPES. */
5459
5460 static void
5461 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5462 {
5463 switch (r->type)
5464 {
5465 case ERT_CLEANUP:
5466 break;
5467
5468 case ERT_TRY:
5469 {
5470 eh_catch c;
5471
5472 /* The types referenced in each catch must first be changed to the
5473 EH types used at runtime. This removes references to FE types
5474 in the region. */
5475 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5476 {
5477 c->type_list = get_eh_types_for_runtime (c->type_list);
5478 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5479 }
5480 }
5481 break;
5482
5483 case ERT_ALLOWED_EXCEPTIONS:
5484 r->u.allowed.type_list
5485 = get_eh_types_for_runtime (r->u.allowed.type_list);
5486 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5487 break;
5488
5489 case ERT_MUST_NOT_THROW:
5490 walk_tree (&r->u.must_not_throw.failure_decl,
5491 find_decls_types_r, fld, fld->pset);
5492 break;
5493 }
5494 }
5495
5496
5497 /* Find decls and types referenced in cgraph node N and store them in
5498 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5499 look for *every* kind of DECL and TYPE node reachable from N,
5500 including those embedded inside types and decls (i.e., TYPE_DECLs,
5501 NAMESPACE_DECLs, etc). */
5502
5503 static void
5504 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5505 {
5506 basic_block bb;
5507 struct function *fn;
5508 unsigned ix;
5509 tree t;
5510
5511 find_decls_types (n->decl, fld);
5512
5513 if (!gimple_has_body_p (n->decl))
5514 return;
5515
5516 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5517
5518 fn = DECL_STRUCT_FUNCTION (n->decl);
5519
5520 /* Traverse locals. */
5521 FOR_EACH_LOCAL_DECL (fn, ix, t)
5522 find_decls_types (t, fld);
5523
5524 /* Traverse EH regions in FN. */
5525 {
5526 eh_region r;
5527 FOR_ALL_EH_REGION_FN (r, fn)
5528 find_decls_types_in_eh_region (r, fld);
5529 }
5530
5531 /* Traverse every statement in FN. */
5532 FOR_EACH_BB_FN (bb, fn)
5533 {
5534 gimple_stmt_iterator si;
5535 unsigned i;
5536
5537 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5538 {
5539 gimple phi = gsi_stmt (si);
5540
5541 for (i = 0; i < gimple_phi_num_args (phi); i++)
5542 {
5543 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5544 find_decls_types (*arg_p, fld);
5545 }
5546 }
5547
5548 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5549 {
5550 gimple stmt = gsi_stmt (si);
5551
5552 if (is_gimple_call (stmt))
5553 find_decls_types (gimple_call_fntype (stmt), fld);
5554
5555 for (i = 0; i < gimple_num_ops (stmt); i++)
5556 {
5557 tree arg = gimple_op (stmt, i);
5558 find_decls_types (arg, fld);
5559 }
5560 }
5561 }
5562 }
5563
5564
5565 /* Find decls and types referenced in varpool node N and store them in
5566 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5567 look for *every* kind of DECL and TYPE node reachable from N,
5568 including those embedded inside types and decls (i.e., TYPE_DECLs,
5569 NAMESPACE_DECLs, etc). */
5570
5571 static void
5572 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5573 {
5574 find_decls_types (v->decl, fld);
5575 }
5576
5577 /* If T needs an assembler name, have one created for it. */
5578
5579 void
5580 assign_assembler_name_if_neeeded (tree t)
5581 {
5582 if (need_assembler_name_p (t))
5583 {
5584 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5585 diagnostics that use input_location to show locus
5586 information. The problem here is that, at this point,
5587 input_location is generally anchored to the end of the file
5588 (since the parser is long gone), so we don't have a good
5589 position to pin it to.
5590
5591 To alleviate this problem, this uses the location of T's
5592 declaration. Examples of this are
5593 testsuite/g++.dg/template/cond2.C and
5594 testsuite/g++.dg/template/pr35240.C. */
5595 location_t saved_location = input_location;
5596 input_location = DECL_SOURCE_LOCATION (t);
5597
5598 decl_assembler_name (t);
5599
5600 input_location = saved_location;
5601 }
5602 }
5603
5604
5605 /* Free language specific information for every operand and expression
5606 in every node of the call graph. This process operates in three stages:
5607
5608 1- Every callgraph node and varpool node is traversed looking for
5609 decls and types embedded in them. This is a more exhaustive
5610 search than that done by find_referenced_vars, because it will
5611 also collect individual fields, decls embedded in types, etc.
5612
5613 2- All the decls found are sent to free_lang_data_in_decl.
5614
5615 3- All the types found are sent to free_lang_data_in_type.
5616
5617 The ordering between decls and types is important because
5618 free_lang_data_in_decl sets assembler names, which includes
5619 mangling. So types cannot be freed up until assembler names have
5620 been set up. */
5621
5622 static void
5623 free_lang_data_in_cgraph (void)
5624 {
5625 struct cgraph_node *n;
5626 varpool_node *v;
5627 struct free_lang_data_d fld;
5628 tree t;
5629 unsigned i;
5630 alias_pair *p;
5631
5632 /* Initialize sets and arrays to store referenced decls and types. */
5633 fld.pset = pointer_set_create ();
5634 fld.worklist.create (0);
5635 fld.decls.create (100);
5636 fld.types.create (100);
5637
5638 /* Find decls and types in the body of every function in the callgraph. */
5639 FOR_EACH_FUNCTION (n)
5640 find_decls_types_in_node (n, &fld);
5641
5642 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5643 find_decls_types (p->decl, &fld);
5644
5645 /* Find decls and types in every varpool symbol. */
5646 FOR_EACH_VARIABLE (v)
5647 find_decls_types_in_var (v, &fld);
5648
5649 /* Set the assembler name on every decl found. We need to do this
5650 now because free_lang_data_in_decl will invalidate data needed
5651 for mangling. This breaks mangling on interdependent decls. */
5652 FOR_EACH_VEC_ELT (fld.decls, i, t)
5653 assign_assembler_name_if_neeeded (t);
5654
5655 /* Traverse every decl found freeing its language data. */
5656 FOR_EACH_VEC_ELT (fld.decls, i, t)
5657 free_lang_data_in_decl (t);
5658
5659 /* Traverse every type found freeing its language data. */
5660 FOR_EACH_VEC_ELT (fld.types, i, t)
5661 free_lang_data_in_type (t);
5662
5663 pointer_set_destroy (fld.pset);
5664 fld.worklist.release ();
5665 fld.decls.release ();
5666 fld.types.release ();
5667 }
5668
5669
5670 /* Free resources that are used by the front end but are not needed once it is done. */
5671
5672 static unsigned
5673 free_lang_data (void)
5674 {
5675 unsigned i;
5676
5677 /* If we are the LTO frontend we have freed lang-specific data already. */
5678 if (in_lto_p
5679 || !flag_generate_lto)
5680 return 0;
5681
5682 /* Allocate and assign alias sets to the standard integer types
5683 while the slots are still laid out the way the front ends generated them. */
5684 for (i = 0; i < itk_none; ++i)
5685 if (integer_types[i])
5686 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5687
5688 /* Traverse the IL resetting language specific information for
5689 operands, expressions, etc. */
5690 free_lang_data_in_cgraph ();
5691
5692 /* Create gimple variants for common types. */
5693 ptrdiff_type_node = integer_type_node;
5694 fileptr_type_node = ptr_type_node;
5695
5696 /* Reset some langhooks. Do not reset types_compatible_p, it may
5697 still be used indirectly via the get_alias_set langhook. */
5698 lang_hooks.dwarf_name = lhd_dwarf_name;
5699 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5700 /* We do not want the default decl_assembler_name implementation;
5701 rather, once everything is fixed, we want a wrapper around it that
5702 asserts all non-local symbols already got their assembler names and
5703 that only produces assembler names for local symbols. Or, better,
5704 make sure we never call decl_assembler_name on local symbols and
5705 devise a separate, middle-end-private scheme for it. */
5706
5707 /* Reset diagnostic machinery. */
5708 tree_diagnostics_defaults (global_dc);
5709
5710 return 0;
5711 }
5712
5713
5714 namespace {
5715
5716 const pass_data pass_data_ipa_free_lang_data =
5717 {
5718 SIMPLE_IPA_PASS, /* type */
5719 "*free_lang_data", /* name */
5720 OPTGROUP_NONE, /* optinfo_flags */
5721 TV_IPA_FREE_LANG_DATA, /* tv_id */
5722 0, /* properties_required */
5723 0, /* properties_provided */
5724 0, /* properties_destroyed */
5725 0, /* todo_flags_start */
5726 0, /* todo_flags_finish */
5727 };
5728
5729 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5730 {
5731 public:
5732 pass_ipa_free_lang_data (gcc::context *ctxt)
5733 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5734 {}
5735
5736 /* opt_pass methods: */
5737 virtual unsigned int execute (function *) { return free_lang_data (); }
5738
5739 }; // class pass_ipa_free_lang_data
5740
5741 } // anon namespace
5742
5743 simple_ipa_opt_pass *
5744 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5745 {
5746 return new pass_ipa_free_lang_data (ctxt);
5747 }
5748
5749 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5750 ATTR_NAME. Also used internally by remove_attribute(). */
5751 bool
5752 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5753 {
5754 size_t ident_len = IDENTIFIER_LENGTH (ident);
5755
5756 if (ident_len == attr_len)
5757 {
5758 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5759 return true;
5760 }
5761 else if (ident_len == attr_len + 4)
5762 {
5763 /* There is the possibility that ATTR is 'text' and IDENT is
5764 '__text__'. */
5765 const char *p = IDENTIFIER_POINTER (ident);
5766 if (p[0] == '_' && p[1] == '_'
5767 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5768 && strncmp (attr_name, p + 2, attr_len) == 0)
5769 return true;
5770 }
5771
5772 return false;
5773 }
5774
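/* Editorial usage sketch (not part of GCC, guarded out): is_attribute_p in
   tree.h is the wrapper around the helper above, so the canonical name
   matches both the plain and the fully underscored spelling, but not a
   partially underscored one.  */
#if 0
static void
sketch_check_attribute_name_matching (void)
{
  gcc_assert (is_attribute_p ("const", get_identifier ("const")));
  gcc_assert (is_attribute_p ("const", get_identifier ("__const__")));
  gcc_assert (!is_attribute_p ("const", get_identifier ("__const")));
}
#endif
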
5775 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5776 of ATTR_NAME, and LIST is not NULL_TREE. */
5777 tree
5778 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5779 {
5780 while (list)
5781 {
5782 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5783
5784 if (ident_len == attr_len)
5785 {
5786 if (!strcmp (attr_name,
5787 IDENTIFIER_POINTER (get_attribute_name (list))))
5788 break;
5789 }
5790 /* TODO: If we made sure that attributes were stored in the
5791 canonical form without '__...__' (i.e., as in 'text' as opposed
5792 to '__text__') then we could avoid the following case. */
5793 else if (ident_len == attr_len + 4)
5794 {
5795 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5796 if (p[0] == '_' && p[1] == '_'
5797 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5798 && strncmp (attr_name, p + 2, attr_len) == 0)
5799 break;
5800 }
5801 list = TREE_CHAIN (list);
5802 }
5803
5804 return list;
5805 }
5806
5807 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5808 return a pointer to the first list element whose attribute name
5809 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5810 '__text__'). */
5811
5812 tree
5813 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5814 tree list)
5815 {
5816 while (list)
5817 {
5818 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5819
5820 if (attr_len > ident_len)
5821 {
5822 list = TREE_CHAIN (list);
5823 continue;
5824 }
5825
5826 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5827
5828 if (strncmp (attr_name, p, attr_len) == 0)
5829 break;
5830
5831 /* TODO: If we made sure that attributes were stored in the
5832 canonical form without '__...__' (i.e., as in 'text' as opposed
5833 to '__text__') then we could avoid the following case. */
5834 if (p[0] == '_' && p[1] == '_'
5835 && strncmp (attr_name, p + 2, attr_len) == 0)
5836 break;
5837
5838 list = TREE_CHAIN (list);
5839 }
5840
5841 return list;
5842 }
5843
5844
5845 /* A variant of lookup_attribute() that can be used with an identifier
5846 as the first argument, and where the identifier can be either
5847 'text' or '__text__'.
5848
5849 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5850 return a pointer to the attribute's list element if the attribute
5851 is part of the list, or NULL_TREE if not found. If the attribute
5852 appears more than once, this only returns the first occurrence; the
5853 TREE_CHAIN of the return value should be passed back in if further
5854 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5855 can be in the form 'text' or '__text__'. */
5856 static tree
5857 lookup_ident_attribute (tree attr_identifier, tree list)
5858 {
5859 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5860
5861 while (list)
5862 {
5863 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5864 == IDENTIFIER_NODE);
5865
5866 /* Identifiers can be compared directly for equality. */
5867 if (attr_identifier == get_attribute_name (list))
5868 break;
5869
5870 /* If they are not equal, they may still be one in the form
5871 'text' while the other one is in the form '__text__'. TODO:
5872 If we were storing attributes in normalized 'text' form, then
5873 this could all go away and we could take full advantage of
5874 the fact that we're comparing identifiers. :-) */
5875 {
5876 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5877 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5878
5879 if (ident_len == attr_len + 4)
5880 {
5881 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5882 const char *q = IDENTIFIER_POINTER (attr_identifier);
5883 if (p[0] == '_' && p[1] == '_'
5884 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5885 && strncmp (q, p + 2, attr_len) == 0)
5886 break;
5887 }
5888 else if (ident_len + 4 == attr_len)
5889 {
5890 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5891 const char *q = IDENTIFIER_POINTER (attr_identifier);
5892 if (q[0] == '_' && q[1] == '_'
5893 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5894 && strncmp (q + 2, p, ident_len) == 0)
5895 break;
5896 }
5897 }
5898 list = TREE_CHAIN (list);
5899 }
5900
5901 return list;
5902 }
5903
5904 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5905 modified list. */
5906
5907 tree
5908 remove_attribute (const char *attr_name, tree list)
5909 {
5910 tree *p;
5911 size_t attr_len = strlen (attr_name);
5912
5913 gcc_checking_assert (attr_name[0] != '_');
5914
5915 for (p = &list; *p; )
5916 {
5917 tree l = *p;
5918 /* TODO: If we were storing attributes in normalized form, here
5919 we could use a simple strcmp(). */
5920 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5921 *p = TREE_CHAIN (l);
5922 else
5923 p = &TREE_CHAIN (l);
5924 }
5925
5926 return list;
5927 }
5928
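/* Illustrative sketch: stripping an attribute from a declaration's
   attribute list, where DECL is a placeholder for some declaration.
   The name must be given without the '__' wrapping, as the assert
   above enforces:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));  */
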
5929 /* Return an attribute list that is the union of a1 and a2. */
5930
5931 tree
5932 merge_attributes (tree a1, tree a2)
5933 {
5934 tree attributes;
5935
5936 /* Either one unset? Take the set one. */
5937
5938 if ((attributes = a1) == 0)
5939 attributes = a2;
5940
5941 /* One that completely contains the other? Take it. */
5942
5943 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5944 {
5945 if (attribute_list_contained (a2, a1))
5946 attributes = a2;
5947 else
5948 {
5949 /* Pick the longest list, and hang on the other list. */
5950
5951 if (list_length (a1) < list_length (a2))
5952 attributes = a2, a2 = a1;
5953
5954 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5955 {
5956 tree a;
5957 for (a = lookup_ident_attribute (get_attribute_name (a2),
5958 attributes);
5959 a != NULL_TREE && !attribute_value_equal (a, a2);
5960 a = lookup_ident_attribute (get_attribute_name (a2),
5961 TREE_CHAIN (a)))
5962 ;
5963 if (a == NULL_TREE)
5964 {
5965 a1 = copy_node (a2);
5966 TREE_CHAIN (a1) = attributes;
5967 attributes = a1;
5968 }
5969 }
5970 }
5971 }
5972 return attributes;
5973 }
5974
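/* Illustrative sketch: attribute lists are TREE_LIST chains with the
   name in TREE_PURPOSE and the arguments in TREE_VALUE, so two small
   lists built with tree_cons can be merged like this (the attribute
   names are chosen only for the example):

     tree a1 = tree_cons (get_identifier ("used"), NULL_TREE, NULL_TREE);
     tree a2 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
     tree merged = merge_attributes (a1, a2);

   MERGED then contains both attributes; an attribute present in both
   lists with an equal value is not duplicated.  */
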
5975 /* Given types T1 and T2, merge their attributes and return
5976 the result. */
5977
5978 tree
5979 merge_type_attributes (tree t1, tree t2)
5980 {
5981 return merge_attributes (TYPE_ATTRIBUTES (t1),
5982 TYPE_ATTRIBUTES (t2));
5983 }
5984
5985 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5986 the result. */
5987
5988 tree
5989 merge_decl_attributes (tree olddecl, tree newdecl)
5990 {
5991 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5992 DECL_ATTRIBUTES (newdecl));
5993 }
5994
5995 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5996
5997 /* Specialization of merge_decl_attributes for various Windows targets.
5998
5999 This handles the following situation:
6000
6001 __declspec (dllimport) int foo;
6002 int foo;
6003
6004 The second instance of `foo' nullifies the dllimport. */
6005
6006 tree
6007 merge_dllimport_decl_attributes (tree old, tree new_tree)
6008 {
6009 tree a;
6010 int delete_dllimport_p = 1;
6011
6012 /* What we need to do here is remove dllimport from `old' if it doesn't
6013 appear in `new'. dllimport behaves like extern: if a declaration is
6014 marked dllimport and a definition appears later, then the object
6015 is not dllimport'd. We also remove a `new' dllimport if the old list
6016 contains dllexport: dllexport always overrides dllimport, regardless
6017 of the order of declaration. */
6018 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6019 delete_dllimport_p = 0;
6020 else if (DECL_DLLIMPORT_P (new_tree)
6021 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6022 {
6023 DECL_DLLIMPORT_P (new_tree) = 0;
6024 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6025 "dllimport ignored", new_tree);
6026 }
6027 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6028 {
6029 /* Warn about overriding a symbol that has already been used, e.g.:
6030 extern int __attribute__ ((dllimport)) foo;
6031 int* bar () {return &foo;}
6032 int foo;
6033 */
6034 if (TREE_USED (old))
6035 {
6036 warning (0, "%q+D redeclared without dllimport attribute "
6037 "after being referenced with dll linkage", new_tree);
6038 /* If we have used a variable's address with dllimport linkage,
6039 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6040 decl may already have had TREE_CONSTANT computed.
6041 We still remove the attribute so that assembler code refers
6042 to '&foo' rather than '_imp__foo'. */
6043 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6044 DECL_DLLIMPORT_P (new_tree) = 1;
6045 }
6046
6047 /* Let an inline definition silently override the external reference,
6048 but otherwise warn about attribute inconsistency. */
6049 else if (TREE_CODE (new_tree) == VAR_DECL
6050 || !DECL_DECLARED_INLINE_P (new_tree))
6051 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6052 "previous dllimport ignored", new_tree);
6053 }
6054 else
6055 delete_dllimport_p = 0;
6056
6057 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6058
6059 if (delete_dllimport_p)
6060 a = remove_attribute ("dllimport", a);
6061
6062 return a;
6063 }
6064
6065 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6066 struct attribute_spec.handler. */
6067
6068 tree
6069 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6070 bool *no_add_attrs)
6071 {
6072 tree node = *pnode;
6073 bool is_dllimport;
6074
6075 /* These attributes may apply to structure and union types being created,
6076 but otherwise should pass to the declaration involved. */
6077 if (!DECL_P (node))
6078 {
6079 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6080 | (int) ATTR_FLAG_ARRAY_NEXT))
6081 {
6082 *no_add_attrs = true;
6083 return tree_cons (name, args, NULL_TREE);
6084 }
6085 if (TREE_CODE (node) == RECORD_TYPE
6086 || TREE_CODE (node) == UNION_TYPE)
6087 {
6088 node = TYPE_NAME (node);
6089 if (!node)
6090 return NULL_TREE;
6091 }
6092 else
6093 {
6094 warning (OPT_Wattributes, "%qE attribute ignored",
6095 name);
6096 *no_add_attrs = true;
6097 return NULL_TREE;
6098 }
6099 }
6100
6101 if (TREE_CODE (node) != FUNCTION_DECL
6102 && TREE_CODE (node) != VAR_DECL
6103 && TREE_CODE (node) != TYPE_DECL)
6104 {
6105 *no_add_attrs = true;
6106 warning (OPT_Wattributes, "%qE attribute ignored",
6107 name);
6108 return NULL_TREE;
6109 }
6110
6111 if (TREE_CODE (node) == TYPE_DECL
6112 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6113 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6114 {
6115 *no_add_attrs = true;
6116 warning (OPT_Wattributes, "%qE attribute ignored",
6117 name);
6118 return NULL_TREE;
6119 }
6120
6121 is_dllimport = is_attribute_p ("dllimport", name);
6122
6123 /* Report error on dllimport ambiguities seen now before they cause
6124 any damage. */
6125 if (is_dllimport)
6126 {
6127 /* Honor any target-specific overrides. */
6128 if (!targetm.valid_dllimport_attribute_p (node))
6129 *no_add_attrs = true;
6130
6131 else if (TREE_CODE (node) == FUNCTION_DECL
6132 && DECL_DECLARED_INLINE_P (node))
6133 {
6134 warning (OPT_Wattributes, "inline function %q+D declared as "
6135 "dllimport: attribute ignored", node);
6136 *no_add_attrs = true;
6137 }
6138 /* Like MS, treat definition of dllimported variables and
6139 non-inlined functions on declaration as syntax errors. */
6140 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6141 {
6142 error ("function %q+D definition is marked dllimport", node);
6143 *no_add_attrs = true;
6144 }
6145
6146 else if (TREE_CODE (node) == VAR_DECL)
6147 {
6148 if (DECL_INITIAL (node))
6149 {
6150 error ("variable %q+D definition is marked dllimport",
6151 node);
6152 *no_add_attrs = true;
6153 }
6154
6155 /* `extern' needn't be specified with dllimport.
6156 Specify `extern' now and hope for the best. Sigh. */
6157 DECL_EXTERNAL (node) = 1;
6158 /* Also, implicitly give global scope to dllimport'd variables
6159 declared within a function, unless they are declared static. */
6160 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6161 TREE_PUBLIC (node) = 1;
6162 }
6163
6164 if (*no_add_attrs == false)
6165 DECL_DLLIMPORT_P (node) = 1;
6166 }
6167 else if (TREE_CODE (node) == FUNCTION_DECL
6168 && DECL_DECLARED_INLINE_P (node)
6169 && flag_keep_inline_dllexport)
6170 /* An exported function, even if inline, must be emitted. */
6171 DECL_EXTERNAL (node) = 0;
6172
6173 /* Report error if symbol is not accessible at global scope. */
6174 if (!TREE_PUBLIC (node)
6175 && (TREE_CODE (node) == VAR_DECL
6176 || TREE_CODE (node) == FUNCTION_DECL))
6177 {
6178 error ("external linkage required for symbol %q+D because of "
6179 "%qE attribute", node, name);
6180 *no_add_attrs = true;
6181 }
6182
6183 /* A dllexport'd entity must have default visibility so that other
6184 program units (shared libraries or the main executable) can see
6185 it. A dllimport'd entity must have default visibility so that
6186 the linker knows that undefined references within this program
6187 unit can be resolved by the dynamic linker. */
6188 if (!*no_add_attrs)
6189 {
6190 if (DECL_VISIBILITY_SPECIFIED (node)
6191 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6192 error ("%qE implies default visibility, but %qD has already "
6193 "been declared with a different visibility",
6194 name, node);
6195 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6196 DECL_VISIBILITY_SPECIFIED (node) = 1;
6197 }
6198
6199 return NULL_TREE;
6200 }
6201
6202 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6203 \f
6204 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6205 of the various TYPE_QUAL values. */
6206
6207 static void
6208 set_type_quals (tree type, int type_quals)
6209 {
6210 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6211 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6212 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6213 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6214 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6215 }
6216
6217 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6218
6219 bool
6220 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6221 {
6222 return (TYPE_QUALS (cand) == type_quals
6223 && TYPE_NAME (cand) == TYPE_NAME (base)
6224 /* Apparently this is needed for Objective-C. */
6225 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6226 /* Check alignment. */
6227 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6228 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6229 TYPE_ATTRIBUTES (base)));
6230 }
6231
6232 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6233
6234 static bool
6235 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6236 {
6237 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6238 && TYPE_NAME (cand) == TYPE_NAME (base)
6239 /* Apparently this is needed for Objective-C. */
6240 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6241 /* Check alignment. */
6242 && TYPE_ALIGN (cand) == align
6243 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6244 TYPE_ATTRIBUTES (base)));
6245 }
6246
6247 /* This function checks to see if TYPE matches the size of one of the
6248 built-in atomic types, and returns that core atomic type. */
6249
6250 static tree
6251 find_atomic_core_type (tree type)
6252 {
6253 tree base_atomic_type;
6254
6255 /* Only handle complete types. */
6256 if (TYPE_SIZE (type) == NULL_TREE)
6257 return NULL_TREE;
6258
6259 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6260 switch (type_size)
6261 {
6262 case 8:
6263 base_atomic_type = atomicQI_type_node;
6264 break;
6265
6266 case 16:
6267 base_atomic_type = atomicHI_type_node;
6268 break;
6269
6270 case 32:
6271 base_atomic_type = atomicSI_type_node;
6272 break;
6273
6274 case 64:
6275 base_atomic_type = atomicDI_type_node;
6276 break;
6277
6278 case 128:
6279 base_atomic_type = atomicTI_type_node;
6280 break;
6281
6282 default:
6283 base_atomic_type = NULL_TREE;
6284 }
6285
6286 return base_atomic_type;
6287 }
6288
6289 /* Return a version of the TYPE, qualified as indicated by the
6290 TYPE_QUALS, if one exists. If no qualified version exists yet,
6291 return NULL_TREE. */
6292
6293 tree
6294 get_qualified_type (tree type, int type_quals)
6295 {
6296 tree t;
6297
6298 if (TYPE_QUALS (type) == type_quals)
6299 return type;
6300
6301 /* Search the chain of variants to see if there is already one there just
6302 like the one we need to have. If so, use that existing one. We must
6303 preserve the TYPE_NAME, since there is code that depends on this. */
6304 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6305 if (check_qualified_type (t, type, type_quals))
6306 return t;
6307
6308 return NULL_TREE;
6309 }
6310
6311 /* Like get_qualified_type, but creates the type if it does not
6312 exist. This function never returns NULL_TREE. */
6313
6314 tree
6315 build_qualified_type (tree type, int type_quals)
6316 {
6317 tree t;
6318
6319 /* See if we already have the appropriate qualified variant. */
6320 t = get_qualified_type (type, type_quals);
6321
6322 /* If not, build it. */
6323 if (!t)
6324 {
6325 t = build_variant_type_copy (type);
6326 set_type_quals (t, type_quals);
6327
6328 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6329 {
6330 /* See if this object can map to a basic atomic type. */
6331 tree atomic_type = find_atomic_core_type (type);
6332 if (atomic_type)
6333 {
6334 /* Ensure the alignment of this type is compatible with
6335 the required alignment of the atomic type. */
6336 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6337 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6338 }
6339 }
6340
6341 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6342 /* Propagate structural equality. */
6343 SET_TYPE_STRUCTURAL_EQUALITY (t);
6344 else if (TYPE_CANONICAL (type) != type)
6345 /* Build the underlying canonical type, since it is different
6346 from TYPE. */
6347 {
6348 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6349 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6350 }
6351 else
6352 /* T is its own canonical type. */
6353 TYPE_CANONICAL (t) = t;
6354
6355 }
6356
6357 return t;
6358 }
6359
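/* Illustrative sketch: the usual way to ask for a const-qualified
   variant of some type TYPE (a placeholder), preserving whatever
   qualifiers it already carries:

     tree ctype = build_qualified_type (type,
                                        TYPE_QUALS (type) | TYPE_QUAL_CONST);

   The result is shared: a second call with the same qualifiers finds
   the existing variant on the TYPE_MAIN_VARIANT chain instead of
   building a new node.  */
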
6360 /* Create a variant of type T with alignment ALIGN. */
6361
6362 tree
6363 build_aligned_type (tree type, unsigned int align)
6364 {
6365 tree t;
6366
6367 if (TYPE_PACKED (type)
6368 || TYPE_ALIGN (type) == align)
6369 return type;
6370
6371 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6372 if (check_aligned_type (t, type, align))
6373 return t;
6374
6375 t = build_variant_type_copy (type);
6376 TYPE_ALIGN (t) = align;
6377
6378 return t;
6379 }
6380
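/* Illustrative sketch: ALIGN is expressed in bits, so a 16-byte-aligned
   variant of some type TYPE (a placeholder) is requested as:

     tree aligned = build_aligned_type (type, 128);  */
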
6381 /* Create a new distinct copy of TYPE. The new type is made its own
6382 MAIN_VARIANT. If TYPE requires structural equality checks, the
6383 resulting type requires structural equality checks; otherwise, its
6384 TYPE_CANONICAL points to itself. */
6385
6386 tree
6387 build_distinct_type_copy (tree type)
6388 {
6389 tree t = copy_node (type);
6390
6391 TYPE_POINTER_TO (t) = 0;
6392 TYPE_REFERENCE_TO (t) = 0;
6393
6394 /* Set the canonical type either to a new equivalence class, or
6395 propagate the need for structural equality checks. */
6396 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6397 SET_TYPE_STRUCTURAL_EQUALITY (t);
6398 else
6399 TYPE_CANONICAL (t) = t;
6400
6401 /* Make it its own variant. */
6402 TYPE_MAIN_VARIANT (t) = t;
6403 TYPE_NEXT_VARIANT (t) = 0;
6404
6405 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6406 whose TREE_TYPE is not t. This can also happen in the Ada
6407 frontend when using subtypes. */
6408
6409 return t;
6410 }
6411
6412 /* Create a new variant of TYPE, equivalent but distinct. This is so
6413 the caller can modify it. TYPE_CANONICAL for the return type will
6414 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6415 are considered equal by the language itself (or that both types
6416 require structural equality checks). */
6417
6418 tree
6419 build_variant_type_copy (tree type)
6420 {
6421 tree t, m = TYPE_MAIN_VARIANT (type);
6422
6423 t = build_distinct_type_copy (type);
6424
6425 /* Since we're building a variant, assume that it is a non-semantic
6426 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6427 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6428
6429 /* Add the new type to the chain of variants of TYPE. */
6430 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6431 TYPE_NEXT_VARIANT (m) = t;
6432 TYPE_MAIN_VARIANT (t) = m;
6433
6434 return t;
6435 }
6436 \f
6437 /* Return true if the FROM trees in both tree maps are equal. */
6438
6439 int
6440 tree_map_base_eq (const void *va, const void *vb)
6441 {
6442 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6443 *const b = (const struct tree_map_base *) vb;
6444 return (a->from == b->from);
6445 }
6446
6447 /* Hash a FROM tree in a tree_map_base. */
6448
6449 unsigned int
6450 tree_map_base_hash (const void *item)
6451 {
6452 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6453 }
6454
6455 /* Return true if this tree map structure is marked for garbage collection
6456 purposes. We simply return true if the from tree is marked, so that this
6457 structure goes away when the from tree goes away. */
6458
6459 int
6460 tree_map_base_marked_p (const void *p)
6461 {
6462 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6463 }
6464
6465 /* Hash a from tree in a tree_map. */
6466
6467 unsigned int
6468 tree_map_hash (const void *item)
6469 {
6470 return (((const struct tree_map *) item)->hash);
6471 }
6472
6473 /* Hash a from tree in a tree_decl_map. */
6474
6475 unsigned int
6476 tree_decl_map_hash (const void *item)
6477 {
6478 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6479 }
6480
6481 /* Return the initialization priority for DECL. */
6482
6483 priority_type
6484 decl_init_priority_lookup (tree decl)
6485 {
6486 symtab_node *snode = symtab_get_node (decl);
6487
6488 if (!snode)
6489 return DEFAULT_INIT_PRIORITY;
6490 return
6491 snode->get_init_priority ();
6492 }
6493
6494 /* Return the finalization priority for DECL. */
6495
6496 priority_type
6497 decl_fini_priority_lookup (tree decl)
6498 {
6499 cgraph_node *node = cgraph_get_node (decl);
6500
6501 if (!node)
6502 return DEFAULT_INIT_PRIORITY;
6503 return
6504 node->get_fini_priority ();
6505 }
6506
6507 /* Set the initialization priority for DECL to PRIORITY. */
6508
6509 void
6510 decl_init_priority_insert (tree decl, priority_type priority)
6511 {
6512 struct symtab_node *snode;
6513
6514 if (priority == DEFAULT_INIT_PRIORITY)
6515 {
6516 snode = symtab_get_node (decl);
6517 if (!snode)
6518 return;
6519 }
6520 else if (TREE_CODE (decl) == VAR_DECL)
6521 snode = varpool_node_for_decl (decl);
6522 else
6523 snode = cgraph_get_create_node (decl);
6524 snode->set_init_priority (priority);
6525 }
6526
6527 /* Set the finalization priority for DECL to PRIORITY. */
6528
6529 void
6530 decl_fini_priority_insert (tree decl, priority_type priority)
6531 {
6532 struct cgraph_node *node;
6533
6534 if (priority == DEFAULT_INIT_PRIORITY)
6535 {
6536 node = cgraph_get_node (decl);
6537 if (!node)
6538 return;
6539 }
6540 else
6541 node = cgraph_get_create_node (decl);
6542 node->set_fini_priority (priority);
6543 }
6544
6545 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6546
6547 static void
6548 print_debug_expr_statistics (void)
6549 {
6550 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6551 (long) htab_size (debug_expr_for_decl),
6552 (long) htab_elements (debug_expr_for_decl),
6553 htab_collisions (debug_expr_for_decl));
6554 }
6555
6556 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6557
6558 static void
6559 print_value_expr_statistics (void)
6560 {
6561 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6562 (long) htab_size (value_expr_for_decl),
6563 (long) htab_elements (value_expr_for_decl),
6564 htab_collisions (value_expr_for_decl));
6565 }
6566
6567 /* Lookup a debug expression for FROM, and return it if we find one. */
6568
6569 tree
6570 decl_debug_expr_lookup (tree from)
6571 {
6572 struct tree_decl_map *h, in;
6573 in.base.from = from;
6574
6575 h = (struct tree_decl_map *)
6576 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6577 if (h)
6578 return h->to;
6579 return NULL_TREE;
6580 }
6581
6582 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6583
6584 void
6585 decl_debug_expr_insert (tree from, tree to)
6586 {
6587 struct tree_decl_map *h;
6588 void **loc;
6589
6590 h = ggc_alloc<tree_decl_map> ();
6591 h->base.from = from;
6592 h->to = to;
6593 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6594 INSERT);
6595 *(struct tree_decl_map **) loc = h;
6596 }
6597
6598 /* Lookup a value expression for FROM, and return it if we find one. */
6599
6600 tree
6601 decl_value_expr_lookup (tree from)
6602 {
6603 struct tree_decl_map *h, in;
6604 in.base.from = from;
6605
6606 h = (struct tree_decl_map *)
6607 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6608 if (h)
6609 return h->to;
6610 return NULL_TREE;
6611 }
6612
6613 /* Insert a mapping FROM->TO in the value expression hashtable. */
6614
6615 void
6616 decl_value_expr_insert (tree from, tree to)
6617 {
6618 struct tree_decl_map *h;
6619 void **loc;
6620
6621 h = ggc_alloc<tree_decl_map> ();
6622 h->base.from = from;
6623 h->to = to;
6624 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6625 INSERT);
6626 *(struct tree_decl_map **) loc = h;
6627 }
6628
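/* Illustrative sketch: the two hash tables above implement simple
   decl -> tree associations.  With VAR and EXPR as placeholders, a
   value expression is recorded and later retrieved like this:

     decl_value_expr_insert (var, expr);
     ...
     tree e = decl_value_expr_lookup (var);

   The lookup returns EXPR, or NULL_TREE if no mapping was inserted.  */
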
6629 /* Lookup a vector of debug arguments for FROM, and return it if we
6630 find one. */
6631
6632 vec<tree, va_gc> **
6633 decl_debug_args_lookup (tree from)
6634 {
6635 struct tree_vec_map *h, in;
6636
6637 if (!DECL_HAS_DEBUG_ARGS_P (from))
6638 return NULL;
6639 gcc_checking_assert (debug_args_for_decl != NULL);
6640 in.base.from = from;
6641 h = (struct tree_vec_map *)
6642 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6643 if (h)
6644 return &h->to;
6645 return NULL;
6646 }
6647
6648 /* Insert a mapping FROM->empty vector of debug arguments in the value
6649 expression hashtable. */
6650
6651 vec<tree, va_gc> **
6652 decl_debug_args_insert (tree from)
6653 {
6654 struct tree_vec_map *h;
6655 void **loc;
6656
6657 if (DECL_HAS_DEBUG_ARGS_P (from))
6658 return decl_debug_args_lookup (from);
6659 if (debug_args_for_decl == NULL)
6660 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6661 tree_vec_map_eq, 0);
6662 h = ggc_alloc<tree_vec_map> ();
6663 h->base.from = from;
6664 h->to = NULL;
6665 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6666 INSERT);
6667 *(struct tree_vec_map **) loc = h;
6668 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6669 return &h->to;
6670 }
6671
6672 /* Hashing of types so that we don't make duplicates.
6673 The entry point is `type_hash_canon'. */
6674
6675 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6676 with types in the TREE_VALUE slots), by adding the hash codes
6677 of the individual types. */
6678
6679 static unsigned int
6680 type_hash_list (const_tree list, hashval_t hashcode)
6681 {
6682 const_tree tail;
6683
6684 for (tail = list; tail; tail = TREE_CHAIN (tail))
6685 if (TREE_VALUE (tail) != error_mark_node)
6686 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6687 hashcode);
6688
6689 return hashcode;
6690 }
6691
6692 /* These are the Hashtable callback functions. */
6693
6694 /* Returns true iff the types are equivalent. */
6695
6696 static int
6697 type_hash_eq (const void *va, const void *vb)
6698 {
6699 const struct type_hash *const a = (const struct type_hash *) va,
6700 *const b = (const struct type_hash *) vb;
6701
6702 /* First test the things that are the same for all types. */
6703 if (a->hash != b->hash
6704 || TREE_CODE (a->type) != TREE_CODE (b->type)
6705 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6706 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6707 TYPE_ATTRIBUTES (b->type))
6708 || (TREE_CODE (a->type) != COMPLEX_TYPE
6709 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6710 return 0;
6711
6712 /* Be careful about comparing arrays before and after the element type
6713 has been completed; don't compare TYPE_ALIGN unless both types are
6714 complete. */
6715 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6716 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6717 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6718 return 0;
6719
6720 switch (TREE_CODE (a->type))
6721 {
6722 case VOID_TYPE:
6723 case COMPLEX_TYPE:
6724 case POINTER_TYPE:
6725 case REFERENCE_TYPE:
6726 case NULLPTR_TYPE:
6727 return 1;
6728
6729 case VECTOR_TYPE:
6730 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6731
6732 case ENUMERAL_TYPE:
6733 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6734 && !(TYPE_VALUES (a->type)
6735 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6736 && TYPE_VALUES (b->type)
6737 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6738 && type_list_equal (TYPE_VALUES (a->type),
6739 TYPE_VALUES (b->type))))
6740 return 0;
6741
6742 /* ... fall through ... */
6743
6744 case INTEGER_TYPE:
6745 case REAL_TYPE:
6746 case BOOLEAN_TYPE:
6747 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6748 return false;
6749 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6750 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6751 TYPE_MAX_VALUE (b->type)))
6752 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6753 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6754 TYPE_MIN_VALUE (b->type))));
6755
6756 case FIXED_POINT_TYPE:
6757 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6758
6759 case OFFSET_TYPE:
6760 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6761
6762 case METHOD_TYPE:
6763 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6764 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6765 || (TYPE_ARG_TYPES (a->type)
6766 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6767 && TYPE_ARG_TYPES (b->type)
6768 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6769 && type_list_equal (TYPE_ARG_TYPES (a->type),
6770 TYPE_ARG_TYPES (b->type)))))
6771 break;
6772 return 0;
6773 case ARRAY_TYPE:
6774 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6775
6776 case RECORD_TYPE:
6777 case UNION_TYPE:
6778 case QUAL_UNION_TYPE:
6779 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6780 || (TYPE_FIELDS (a->type)
6781 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6782 && TYPE_FIELDS (b->type)
6783 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6784 && type_list_equal (TYPE_FIELDS (a->type),
6785 TYPE_FIELDS (b->type))));
6786
6787 case FUNCTION_TYPE:
6788 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6789 || (TYPE_ARG_TYPES (a->type)
6790 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6791 && TYPE_ARG_TYPES (b->type)
6792 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6793 && type_list_equal (TYPE_ARG_TYPES (a->type),
6794 TYPE_ARG_TYPES (b->type))))
6795 break;
6796 return 0;
6797
6798 default:
6799 return 0;
6800 }
6801
6802 if (lang_hooks.types.type_hash_eq != NULL)
6803 return lang_hooks.types.type_hash_eq (a->type, b->type);
6804
6805 return 1;
6806 }
6807
6808 /* Return the cached hash value. */
6809
6810 static hashval_t
6811 type_hash_hash (const void *item)
6812 {
6813 return ((const struct type_hash *) item)->hash;
6814 }
6815
6816 /* Look in the type hash table for a type isomorphic to TYPE.
6817 If one is found, return it. Otherwise return 0. */
6818
6819 static tree
6820 type_hash_lookup (hashval_t hashcode, tree type)
6821 {
6822 struct type_hash *h, in;
6823
6824 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6825 must call that routine before comparing TYPE_ALIGNs. */
6826 layout_type (type);
6827
6828 in.hash = hashcode;
6829 in.type = type;
6830
6831 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6832 hashcode);
6833 if (h)
6834 return h->type;
6835 return NULL_TREE;
6836 }
6837
6838 /* Add an entry to the type-hash-table
6839 for a type TYPE whose hash code is HASHCODE. */
6840
6841 static void
6842 type_hash_add (hashval_t hashcode, tree type)
6843 {
6844 struct type_hash *h;
6845 void **loc;
6846
6847 h = ggc_alloc<type_hash> ();
6848 h->hash = hashcode;
6849 h->type = type;
6850 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6851 *loc = (void *)h;
6852 }
6853
6854 /* Given TYPE, and HASHCODE its hash code, return the canonical
6855 object for an identical type if one already exists.
6856 Otherwise, return TYPE, and record it as the canonical object.
6857
6858 To use this function, first create a type of the sort you want.
6859 Then compute its hash code from the fields of the type that
6860 make it different from other similar types.
6861 Then call this function and use the value. */
6862
6863 tree
6864 type_hash_canon (unsigned int hashcode, tree type)
6865 {
6866 tree t1;
6867
6868 /* The hash table only contains main variants, so ensure that's what we're
6869 being passed. */
6870 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6871
6872 /* See if the type is in the hash table already. If so, return it.
6873 Otherwise, add the type. */
6874 t1 = type_hash_lookup (hashcode, type);
6875 if (t1 != 0)
6876 {
6877 if (GATHER_STATISTICS)
6878 {
6879 tree_code_counts[(int) TREE_CODE (type)]--;
6880 tree_node_counts[(int) t_kind]--;
6881 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6882 }
6883 return t1;
6884 }
6885 else
6886 {
6887 type_hash_add (hashcode, type);
6888 return type;
6889 }
6890 }
6891
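/* Illustrative sketch of the recipe described above; this is essentially
   what build_nonstandard_integer_type does further down in this file,
   with PREC standing for the desired precision:

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = prec;
     fixup_unsigned_type (t);
     t = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (t)), t);  */
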
6892 /* See if the data pointed to by the type hash table is marked. We consider
6893 it marked if the type is marked or if a debug type number or symbol
6894 table entry has been made for the type. */
6895
6896 static int
6897 type_hash_marked_p (const void *p)
6898 {
6899 const_tree const type = ((const struct type_hash *) p)->type;
6900
6901 return ggc_marked_p (type);
6902 }
6903
6904 static void
6905 print_type_hash_statistics (void)
6906 {
6907 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6908 (long) htab_size (type_hash_table),
6909 (long) htab_elements (type_hash_table),
6910 htab_collisions (type_hash_table));
6911 }
6912
6913 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6914 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6915 by adding the hash codes of the individual attributes. */
6916
6917 static unsigned int
6918 attribute_hash_list (const_tree list, hashval_t hashcode)
6919 {
6920 const_tree tail;
6921
6922 for (tail = list; tail; tail = TREE_CHAIN (tail))
6923 /* ??? Do we want to add in TREE_VALUE too? */
6924 hashcode = iterative_hash_object
6925 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6926 return hashcode;
6927 }
6928
6929 /* Given two lists of attributes, return true if list L2 is
6930 equivalent to L1. */
6931
6932 int
6933 attribute_list_equal (const_tree l1, const_tree l2)
6934 {
6935 if (l1 == l2)
6936 return 1;
6937
6938 return attribute_list_contained (l1, l2)
6939 && attribute_list_contained (l2, l1);
6940 }
6941
6942 /* Given two lists of attributes, return true if list L2 is
6943 completely contained within L1. */
6944 /* ??? This would be faster if attribute names were stored in a canonicalized
6945 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6946 must be used to show these elements are equivalent (which they are). */
6947 /* ??? It's not clear that attributes with arguments will always be handled
6948 correctly. */
6949
6950 int
6951 attribute_list_contained (const_tree l1, const_tree l2)
6952 {
6953 const_tree t1, t2;
6954
6955 /* First check the obvious, maybe the lists are identical. */
6956 if (l1 == l2)
6957 return 1;
6958
6959 /* Maybe the lists are similar. */
6960 for (t1 = l1, t2 = l2;
6961 t1 != 0 && t2 != 0
6962 && get_attribute_name (t1) == get_attribute_name (t2)
6963 && TREE_VALUE (t1) == TREE_VALUE (t2);
6964 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6965 ;
6966
6967 /* Maybe the lists are equal. */
6968 if (t1 == 0 && t2 == 0)
6969 return 1;
6970
6971 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6972 {
6973 const_tree attr;
6974 /* This CONST_CAST is okay because lookup_attribute does not
6975 modify its argument and the return value is assigned to a
6976 const_tree. */
6977 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6978 CONST_CAST_TREE (l1));
6979 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6980 attr = lookup_ident_attribute (get_attribute_name (t2),
6981 TREE_CHAIN (attr)))
6982 ;
6983
6984 if (attr == NULL_TREE)
6985 return 0;
6986 }
6987
6988 return 1;
6989 }
6990
6991 /* Given two lists of types
6992 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6993 return 1 if the lists contain the same types in the same order.
6994 Also, the TREE_PURPOSEs must match. */
6995
6996 int
6997 type_list_equal (const_tree l1, const_tree l2)
6998 {
6999 const_tree t1, t2;
7000
7001 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7002 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7003 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7004 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7005 && (TREE_TYPE (TREE_PURPOSE (t1))
7006 == TREE_TYPE (TREE_PURPOSE (t2))))))
7007 return 0;
7008
7009 return t1 == t2;
7010 }
7011
7012 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7013 given by TYPE. If the argument list accepts variable arguments,
7014 then this function counts only the ordinary arguments. */
7015
7016 int
7017 type_num_arguments (const_tree type)
7018 {
7019 int i = 0;
7020 tree t;
7021
7022 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7023 /* If the function does not take a variable number of arguments,
7024 the last element in the list will have type `void'. */
7025 if (VOID_TYPE_P (TREE_VALUE (t)))
7026 break;
7027 else
7028 ++i;
7029
7030 return i;
7031 }
7032
7033 /* Nonzero if integer constants T1 and T2
7034 represent the same constant value. */
7035
7036 int
7037 tree_int_cst_equal (const_tree t1, const_tree t2)
7038 {
7039 if (t1 == t2)
7040 return 1;
7041
7042 if (t1 == 0 || t2 == 0)
7043 return 0;
7044
7045 if (TREE_CODE (t1) == INTEGER_CST
7046 && TREE_CODE (t2) == INTEGER_CST
7047 && wi::to_widest (t1) == wi::to_widest (t2))
7048 return 1;
7049
7050 return 0;
7051 }
7052
7053 /* Return true if T is an INTEGER_CST whose numerical value (extended
7054 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7055
7056 bool
7057 tree_fits_shwi_p (const_tree t)
7058 {
7059 return (t != NULL_TREE
7060 && TREE_CODE (t) == INTEGER_CST
7061 && wi::fits_shwi_p (wi::to_widest (t)));
7062 }
7063
7064 /* Return true if T is an INTEGER_CST whose numerical value (extended
7065 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7066
7067 bool
7068 tree_fits_uhwi_p (const_tree t)
7069 {
7070 return (t != NULL_TREE
7071 && TREE_CODE (t) == INTEGER_CST
7072 && wi::fits_uhwi_p (wi::to_widest (t)));
7073 }
7074
7075 /* T is an INTEGER_CST whose numerical value (extended according to
7076 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7077 HOST_WIDE_INT. */
7078
7079 HOST_WIDE_INT
7080 tree_to_shwi (const_tree t)
7081 {
7082 gcc_assert (tree_fits_shwi_p (t));
7083 return TREE_INT_CST_LOW (t);
7084 }
7085
7086 /* T is an INTEGER_CST whose numerical value (extended according to
7087 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7088 HOST_WIDE_INT. */
7089
7090 unsigned HOST_WIDE_INT
7091 tree_to_uhwi (const_tree t)
7092 {
7093 gcc_assert (tree_fits_uhwi_p (t));
7094 return TREE_INT_CST_LOW (t);
7095 }
7096
7097 /* Return the most significant (sign) bit of T. */
7098
7099 int
7100 tree_int_cst_sign_bit (const_tree t)
7101 {
7102 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7103
7104 return wi::extract_uhwi (t, bitno, 1);
7105 }
7106
7107 /* Return an indication of the sign of the integer constant T.
7108 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7109 Note that -1 will never be returned if T's type is unsigned. */
7110
7111 int
7112 tree_int_cst_sgn (const_tree t)
7113 {
7114 if (wi::eq_p (t, 0))
7115 return 0;
7116 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7117 return 1;
7118 else if (wi::neg_p (t))
7119 return -1;
7120 else
7121 return 1;
7122 }
7123
7124 /* Return the minimum number of bits needed to represent VALUE in a
7125 signed or unsigned type; SGN says which. */
7126
7127 unsigned int
7128 tree_int_cst_min_precision (tree value, signop sgn)
7129 {
7130 /* If the value is negative, compute its negative minus 1. The latter
7131 adjustment is because the absolute value of the largest negative value
7132 is one larger than the largest positive value. This is equivalent to
7133 a bit-wise negation, so use that operation instead. */
7134
7135 if (tree_int_cst_sgn (value) < 0)
7136 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7137
7138 /* Return the number of bits needed, taking into account the fact
7139 that we need one more bit for a signed than unsigned type.
7140 If value is 0 or -1, the minimum precision is 1 no matter
7141 whether SGN is SIGNED or UNSIGNED. */
7142
7143 if (integer_zerop (value))
7144 return 1;
7145 else
7146 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7147 }
7148
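/* Illustrative examples of the above: the value 5 needs 3 bits as an
   unsigned quantity (101) but 4 bits as a signed one (0101), while 0
   and -1 need a single bit either way:

     tree five = build_int_cst (integer_type_node, 5);
     tree_int_cst_min_precision (five, UNSIGNED);   yields 3
     tree_int_cst_min_precision (five, SIGNED);     yields 4   */
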
7149 /* Return truthvalue of whether T1 is the same tree structure as T2.
7150 Return 1 if they are the same.
7151 Return 0 if they are understandably different.
7152 Return -1 if either contains tree structure not understood by
7153 this function. */
7154
7155 int
7156 simple_cst_equal (const_tree t1, const_tree t2)
7157 {
7158 enum tree_code code1, code2;
7159 int cmp;
7160 int i;
7161
7162 if (t1 == t2)
7163 return 1;
7164 if (t1 == 0 || t2 == 0)
7165 return 0;
7166
7167 code1 = TREE_CODE (t1);
7168 code2 = TREE_CODE (t2);
7169
7170 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7171 {
7172 if (CONVERT_EXPR_CODE_P (code2)
7173 || code2 == NON_LVALUE_EXPR)
7174 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7175 else
7176 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7177 }
7178
7179 else if (CONVERT_EXPR_CODE_P (code2)
7180 || code2 == NON_LVALUE_EXPR)
7181 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7182
7183 if (code1 != code2)
7184 return 0;
7185
7186 switch (code1)
7187 {
7188 case INTEGER_CST:
7189 return wi::to_widest (t1) == wi::to_widest (t2);
7190
7191 case REAL_CST:
7192 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7193
7194 case FIXED_CST:
7195 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7196
7197 case STRING_CST:
7198 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7199 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7200 TREE_STRING_LENGTH (t1)));
7201
7202 case CONSTRUCTOR:
7203 {
7204 unsigned HOST_WIDE_INT idx;
7205 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7206 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7207
7208 if (vec_safe_length (v1) != vec_safe_length (v2))
7209 return false;
7210
7211 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7212 /* ??? Should we handle also fields here? */
7213 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7214 return false;
7215 return true;
7216 }
7217
7218 case SAVE_EXPR:
7219 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7220
7221 case CALL_EXPR:
7222 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7223 if (cmp <= 0)
7224 return cmp;
7225 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7226 return 0;
7227 {
7228 const_tree arg1, arg2;
7229 const_call_expr_arg_iterator iter1, iter2;
7230 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7231 arg2 = first_const_call_expr_arg (t2, &iter2);
7232 arg1 && arg2;
7233 arg1 = next_const_call_expr_arg (&iter1),
7234 arg2 = next_const_call_expr_arg (&iter2))
7235 {
7236 cmp = simple_cst_equal (arg1, arg2);
7237 if (cmp <= 0)
7238 return cmp;
7239 }
7240 return arg1 == arg2;
7241 }
7242
7243 case TARGET_EXPR:
7244 /* Special case: if either target is an unallocated VAR_DECL,
7245 it means that it's going to be unified with whatever the
7246 TARGET_EXPR is really supposed to initialize, so treat it
7247 as being equivalent to anything. */
7248 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7249 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7250 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7251 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7252 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7253 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7254 cmp = 1;
7255 else
7256 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7257
7258 if (cmp <= 0)
7259 return cmp;
7260
7261 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7262
7263 case WITH_CLEANUP_EXPR:
7264 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7265 if (cmp <= 0)
7266 return cmp;
7267
7268 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7269
7270 case COMPONENT_REF:
7271 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7272 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7273
7274 return 0;
7275
7276 case VAR_DECL:
7277 case PARM_DECL:
7278 case CONST_DECL:
7279 case FUNCTION_DECL:
7280 return 0;
7281
7282 default:
7283 break;
7284 }
7285
7286 /* This general rule works for most tree codes. All exceptions should be
7287 handled above. If this is a language-specific tree code, we can't
7288 trust what might be in the operand, so say we don't know
7289 the situation. */
7290 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7291 return -1;
7292
7293 switch (TREE_CODE_CLASS (code1))
7294 {
7295 case tcc_unary:
7296 case tcc_binary:
7297 case tcc_comparison:
7298 case tcc_expression:
7299 case tcc_reference:
7300 case tcc_statement:
7301 cmp = 1;
7302 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7303 {
7304 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7305 if (cmp <= 0)
7306 return cmp;
7307 }
7308
7309 return cmp;
7310
7311 default:
7312 return -1;
7313 }
7314 }
7315
7316 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7317 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7318 than U, respectively. */
7319
7320 int
7321 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7322 {
7323 if (tree_int_cst_sgn (t) < 0)
7324 return -1;
7325 else if (!tree_fits_uhwi_p (t))
7326 return 1;
7327 else if (TREE_INT_CST_LOW (t) == u)
7328 return 0;
7329 else if (TREE_INT_CST_LOW (t) < u)
7330 return -1;
7331 else
7332 return 1;
7333 }
7334
7335 /* Return true if SIZE represents a constant size that is in bounds of
7336 what the middle-end and the backend accept (covering not more than
7337 half of the address-space). */
7338
7339 bool
7340 valid_constant_size_p (const_tree size)
7341 {
7342 if (! tree_fits_uhwi_p (size)
7343 || TREE_OVERFLOW (size)
7344 || tree_int_cst_sign_bit (size) != 0)
7345 return false;
7346 return true;
7347 }
7348
7349 /* Return the precision of the type, or for a complex or vector type the
7350 precision of the type of its elements. */
7351
7352 unsigned int
7353 element_precision (const_tree type)
7354 {
7355 enum tree_code code = TREE_CODE (type);
7356 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7357 type = TREE_TYPE (type);
7358
7359 return TYPE_PRECISION (type);
7360 }
7361
7362 /* Return true if CODE represents an associative tree code. Otherwise
7363 return false. */
7364 bool
7365 associative_tree_code (enum tree_code code)
7366 {
7367 switch (code)
7368 {
7369 case BIT_IOR_EXPR:
7370 case BIT_AND_EXPR:
7371 case BIT_XOR_EXPR:
7372 case PLUS_EXPR:
7373 case MULT_EXPR:
7374 case MIN_EXPR:
7375 case MAX_EXPR:
7376 return true;
7377
7378 default:
7379 break;
7380 }
7381 return false;
7382 }
7383
7384 /* Return true if CODE represents a commutative tree code. Otherwise
7385 return false. */
7386 bool
7387 commutative_tree_code (enum tree_code code)
7388 {
7389 switch (code)
7390 {
7391 case PLUS_EXPR:
7392 case MULT_EXPR:
7393 case MULT_HIGHPART_EXPR:
7394 case MIN_EXPR:
7395 case MAX_EXPR:
7396 case BIT_IOR_EXPR:
7397 case BIT_XOR_EXPR:
7398 case BIT_AND_EXPR:
7399 case NE_EXPR:
7400 case EQ_EXPR:
7401 case UNORDERED_EXPR:
7402 case ORDERED_EXPR:
7403 case UNEQ_EXPR:
7404 case LTGT_EXPR:
7405 case TRUTH_AND_EXPR:
7406 case TRUTH_XOR_EXPR:
7407 case TRUTH_OR_EXPR:
7408 case WIDEN_MULT_EXPR:
7409 case VEC_WIDEN_MULT_HI_EXPR:
7410 case VEC_WIDEN_MULT_LO_EXPR:
7411 case VEC_WIDEN_MULT_EVEN_EXPR:
7412 case VEC_WIDEN_MULT_ODD_EXPR:
7413 return true;
7414
7415 default:
7416 break;
7417 }
7418 return false;
7419 }
7420
7421 /* Return true if CODE represents a ternary tree code for which the
7422 first two operands are commutative. Otherwise return false. */
7423 bool
7424 commutative_ternary_tree_code (enum tree_code code)
7425 {
7426 switch (code)
7427 {
7428 case WIDEN_MULT_PLUS_EXPR:
7429 case WIDEN_MULT_MINUS_EXPR:
7430 return true;
7431
7432 default:
7433 break;
7434 }
7435 return false;
7436 }
7437
7438 /* Generate a hash value for an expression. This can be used iteratively
7439 by passing a previous result as the VAL argument.
7440
7441 This function is intended to produce the same hash for expressions which
7442 would compare equal using operand_equal_p. */
7443
7444 hashval_t
7445 iterative_hash_expr (const_tree t, hashval_t val)
7446 {
7447 int i;
7448 enum tree_code code;
7449 enum tree_code_class tclass;
7450
7451 if (t == NULL_TREE)
7452 return iterative_hash_hashval_t (0, val);
7453
7454 code = TREE_CODE (t);
7455
7456 switch (code)
7457 {
7458 /* Alas, constants aren't shared, so we can't rely on pointer
7459 identity. */
7460 case VOID_CST:
7461 return iterative_hash_hashval_t (0, val);
7462 case INTEGER_CST:
7463 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7464 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7465 return val;
7466 case REAL_CST:
7467 {
7468 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7469
7470 return iterative_hash_hashval_t (val2, val);
7471 }
7472 case FIXED_CST:
7473 {
7474 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7475
7476 return iterative_hash_hashval_t (val2, val);
7477 }
7478 case STRING_CST:
7479 return iterative_hash (TREE_STRING_POINTER (t),
7480 TREE_STRING_LENGTH (t), val);
7481 case COMPLEX_CST:
7482 val = iterative_hash_expr (TREE_REALPART (t), val);
7483 return iterative_hash_expr (TREE_IMAGPART (t), val);
7484 case VECTOR_CST:
7485 {
7486 unsigned i;
7487 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7488 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7489 return val;
7490 }
7491 case SSA_NAME:
7492 /* We can just compare by pointer. */
7493 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7494 case PLACEHOLDER_EXPR:
7495 /* The node itself doesn't matter. */
7496 return val;
7497 case TREE_LIST:
7498 /* A list of expressions, for a CALL_EXPR or as the elements of a
7499 VECTOR_CST. */
7500 for (; t; t = TREE_CHAIN (t))
7501 val = iterative_hash_expr (TREE_VALUE (t), val);
7502 return val;
7503 case CONSTRUCTOR:
7504 {
7505 unsigned HOST_WIDE_INT idx;
7506 tree field, value;
7507 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7508 {
7509 val = iterative_hash_expr (field, val);
7510 val = iterative_hash_expr (value, val);
7511 }
7512 return val;
7513 }
7514 case FUNCTION_DECL:
7515 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7516 Otherwise nodes that compare equal according to operand_equal_p might
7517 get different hash codes. However, don't do this for machine specific
7518 or front end builtins, since the function code is overloaded in those
7519 cases. */
7520 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7521 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7522 {
7523 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7524 code = TREE_CODE (t);
7525 }
7526 /* FALL THROUGH */
7527 default:
7528 tclass = TREE_CODE_CLASS (code);
7529
7530 if (tclass == tcc_declaration)
7531 {
7532 /* DECL's have a unique ID */
7533 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7534 }
7535 else
7536 {
7537 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7538
7539 val = iterative_hash_object (code, val);
7540
7541 /* Don't hash the type, that can lead to having nodes which
7542 compare equal according to operand_equal_p, but which
7543 have different hash codes. */
7544 if (CONVERT_EXPR_CODE_P (code)
7545 || code == NON_LVALUE_EXPR)
7546 {
7547 /* Make sure to include signedness in the hash computation. */
7548 val += TYPE_UNSIGNED (TREE_TYPE (t));
7549 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7550 }
7551
7552 else if (commutative_tree_code (code))
7553 {
7554 /* It's a commutative expression. We want to hash it the same
7555 however it appears. We do this by first hashing both operands
7556 and then rehashing based on the order of their independent
7557 hashes. */
7558 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7559 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7560 hashval_t tem;
7561
7562 if (one > two)
7563 tem = one, one = two, two = tem;
7564
7565 val = iterative_hash_hashval_t (one, val);
7566 val = iterative_hash_hashval_t (two, val);
7567 }
7568 else
7569 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7570 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7571 }
7572 return val;
7573 }
7574 }
7575
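/* Illustrative property of the hash above, with A, B and TYPE as
   placeholders: because commutative codes hash their operands in order
   of the operands' own hash values, swapping them does not change the
   result, matching what operand_equal_p considers equal:

     iterative_hash_expr (build2 (PLUS_EXPR, type, a, b), 0)
       == iterative_hash_expr (build2 (PLUS_EXPR, type, b, a), 0)  */
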
7576 /* Constructors for pointer, array and function types.
7577 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7578 constructed by language-dependent code, not here.) */
7579
7580 /* Construct, lay out and return the type of pointers to TO_TYPE with
7581 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7582 reference all of memory. If such a type has already been
7583 constructed, reuse it. */
7584
7585 tree
7586 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7587 bool can_alias_all)
7588 {
7589 tree t;
7590
7591 if (to_type == error_mark_node)
7592 return error_mark_node;
7593
7594 /* If the pointed-to type has the may_alias attribute set, force
7595 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7596 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7597 can_alias_all = true;
7598
7599 /* In some cases, languages will have things that aren't a POINTER_TYPE
7600 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7601 In that case, return that type without regard to the rest of our
7602 operands.
7603
7604 ??? This is a kludge, but consistent with the way this function has
7605 always operated and there doesn't seem to be a good way to avoid this
7606 at the moment. */
7607 if (TYPE_POINTER_TO (to_type) != 0
7608 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7609 return TYPE_POINTER_TO (to_type);
7610
7611 /* First, if we already have a type for pointers to TO_TYPE and it's
7612 the proper mode, use it. */
7613 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7614 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7615 return t;
7616
7617 t = make_node (POINTER_TYPE);
7618
7619 TREE_TYPE (t) = to_type;
7620 SET_TYPE_MODE (t, mode);
7621 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7622 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7623 TYPE_POINTER_TO (to_type) = t;
7624
7625 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7626 SET_TYPE_STRUCTURAL_EQUALITY (t);
7627 else if (TYPE_CANONICAL (to_type) != to_type)
7628 TYPE_CANONICAL (t)
7629 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7630 mode, can_alias_all);
7631
7632 /* Lay out the type. This function has many callers that are concerned
7633 with expression-construction, and this simplifies them all. */
7634 layout_type (t);
7635
7636 return t;
7637 }
7638
7639 /* By default build pointers in ptr_mode. */
7640
7641 tree
7642 build_pointer_type (tree to_type)
7643 {
7644 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7645 : TYPE_ADDR_SPACE (to_type);
7646 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7647 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7648 }
7649
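/* Illustrative sketch: pointer types are shared, so repeated requests
   for a pointer to the same type yield the same node:

     tree p1 = build_pointer_type (char_type_node);
     tree p2 = build_pointer_type (char_type_node);
     gcc_assert (p1 == p2);  */
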
7650 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7651
7652 tree
7653 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7654 bool can_alias_all)
7655 {
7656 tree t;
7657
7658 if (to_type == error_mark_node)
7659 return error_mark_node;
7660
7661 /* If the pointed-to type has the may_alias attribute set, force
7662 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7663 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7664 can_alias_all = true;
7665
7666 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7667 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7668 In that case, return that type without regard to the rest of our
7669 operands.
7670
7671 ??? This is a kludge, but consistent with the way this function has
7672 always operated and there doesn't seem to be a good way to avoid this
7673 at the moment. */
7674 if (TYPE_REFERENCE_TO (to_type) != 0
7675 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7676 return TYPE_REFERENCE_TO (to_type);
7677
7678 /* First, if we already have a type for pointers to TO_TYPE and it's
7679 the proper mode, use it. */
7680 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7681 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7682 return t;
7683
7684 t = make_node (REFERENCE_TYPE);
7685
7686 TREE_TYPE (t) = to_type;
7687 SET_TYPE_MODE (t, mode);
7688 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7689 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7690 TYPE_REFERENCE_TO (to_type) = t;
7691
7692 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7693 SET_TYPE_STRUCTURAL_EQUALITY (t);
7694 else if (TYPE_CANONICAL (to_type) != to_type)
7695 TYPE_CANONICAL (t)
7696 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7697 mode, can_alias_all);
7698
7699 layout_type (t);
7700
7701 return t;
7702 }
7703
7704
7705 /* Build the node for the type of references-to-TO_TYPE by default
7706 in ptr_mode. */
7707
7708 tree
7709 build_reference_type (tree to_type)
7710 {
7711 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7712 : TYPE_ADDR_SPACE (to_type);
7713 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7714 return build_reference_type_for_mode (to_type, pointer_mode, false);
7715 }
7716
7717 #define MAX_INT_CACHED_PREC \
7718 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7719 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7720
7721 /* Builds a signed or unsigned integer type of precision PRECISION.
7722 Used for C bitfields whose precision does not match that of
7723 built-in target types. */
7724 tree
7725 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7726 int unsignedp)
7727 {
7728 tree itype, ret;
7729
7730 if (unsignedp)
7731 unsignedp = MAX_INT_CACHED_PREC + 1;
7732
7733 if (precision <= MAX_INT_CACHED_PREC)
7734 {
7735 itype = nonstandard_integer_type_cache[precision + unsignedp];
7736 if (itype)
7737 return itype;
7738 }
7739
7740 itype = make_node (INTEGER_TYPE);
7741 TYPE_PRECISION (itype) = precision;
7742
7743 if (unsignedp)
7744 fixup_unsigned_type (itype);
7745 else
7746 fixup_signed_type (itype);
7747
7748 ret = itype;
7749 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7750 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7751 if (precision <= MAX_INT_CACHED_PREC)
7752 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7753
7754 return ret;
7755 }
7756
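/* Illustrative sketch, not part of the original tree.c: the type of a C
   bit-field such as "unsigned x : 5" matches no built-in target type, so a
   front end would request it as below.  The function name is hypothetical;
   results are cached for precisions up to MAX_INT_CACHED_PREC.  */

static tree
example_bitfield_type_sketch (void)
{
  /* A 5-bit unsigned integer type.  */
  return build_nonstandard_integer_type (5, 1);
}
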
7757 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7758 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7759 is true, reuse such a type that has already been constructed. */
7760
7761 static tree
7762 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7763 {
7764 tree itype = make_node (INTEGER_TYPE);
7765 hashval_t hashcode = 0;
7766
7767 TREE_TYPE (itype) = type;
7768
7769 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7770 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7771
7772 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7773 SET_TYPE_MODE (itype, TYPE_MODE (type));
7774 TYPE_SIZE (itype) = TYPE_SIZE (type);
7775 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7776 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7777 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7778
7779 if (!shared)
7780 return itype;
7781
7782 if ((TYPE_MIN_VALUE (itype)
7783 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7784 || (TYPE_MAX_VALUE (itype)
7785 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7786 {
7787 /* Since we cannot reliably merge this type, we need to compare it using
7788 structural equality checks. */
7789 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7790 return itype;
7791 }
7792
7793 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7794 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7795 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7796 itype = type_hash_canon (hashcode, itype);
7797
7798 return itype;
7799 }
7800
7801 /* Wrapper around build_range_type_1 with SHARED set to true. */
7802
7803 tree
7804 build_range_type (tree type, tree lowval, tree highval)
7805 {
7806 return build_range_type_1 (type, lowval, highval, true);
7807 }
7808
7809 /* Wrapper around build_range_type_1 with SHARED set to false. */
7810
7811 tree
7812 build_nonshared_range_type (tree type, tree lowval, tree highval)
7813 {
7814 return build_range_type_1 (type, lowval, highval, false);
7815 }
7816
7817 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7818 MAXVAL should be the maximum value in the domain
7819 (one less than the length of the array).
7820
7821 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7822 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7823 The limit exists because the result is a signed type and we don't handle
7824 sizes that use more than one HOST_WIDE_INT. */
7825
7826 tree
7827 build_index_type (tree maxval)
7828 {
7829 return build_range_type (sizetype, size_zero_node, maxval);
7830 }
7831
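/* Illustrative sketch, not part of the original tree.c: the TYPE_DOMAIN of a
   ten-element array is the sizetype range 0..9, built with build_index_type;
   an explicit range over some other discrete type goes through
   build_range_type.  The function name is hypothetical.  */

static tree
example_index_domain_sketch (void)
{
  /* Domain for "T a[10]": indices 0 through 9.  */
  tree domain = build_index_type (size_int (9));
  gcc_assert (tree_int_cst_equal (TYPE_MIN_VALUE (domain), size_zero_node));
  return domain;
}
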
7832 /* Return true if the debug information for TYPE, a subtype, should be emitted
7833 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7834 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7835 debug info and doesn't reflect the source code. */
7836
7837 bool
7838 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7839 {
7840 tree base_type = TREE_TYPE (type), low, high;
7841
7842 /* Subrange types have a base type which is an integral type. */
7843 if (!INTEGRAL_TYPE_P (base_type))
7844 return false;
7845
7846 /* Get the real bounds of the subtype. */
7847 if (lang_hooks.types.get_subrange_bounds)
7848 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7849 else
7850 {
7851 low = TYPE_MIN_VALUE (type);
7852 high = TYPE_MAX_VALUE (type);
7853 }
7854
7855 /* If the type and its base type have the same representation and the same
7856 name, then the type is not a subrange but a copy of the base type. */
7857 if ((TREE_CODE (base_type) == INTEGER_TYPE
7858 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7859 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7860 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7861 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7862 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7863 return false;
7864
7865 if (lowval)
7866 *lowval = low;
7867 if (highval)
7868 *highval = high;
7869 return true;
7870 }
7871
7872 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7873 and number of elements specified by the range of values of INDEX_TYPE.
7874 If SHARED is true, reuse such a type that has already been constructed. */
7875
7876 static tree
7877 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7878 {
7879 tree t;
7880
7881 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7882 {
7883 error ("arrays of functions are not meaningful");
7884 elt_type = integer_type_node;
7885 }
7886
7887 t = make_node (ARRAY_TYPE);
7888 TREE_TYPE (t) = elt_type;
7889 TYPE_DOMAIN (t) = index_type;
7890 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7891 layout_type (t);
7892
7893 /* If the element type is incomplete at this point we get marked for
7894 structural equality. Do not record these types in the canonical
7895 type hashtable. */
7896 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7897 return t;
7898
7899 if (shared)
7900 {
7901 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7902 if (index_type)
7903 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7904 t = type_hash_canon (hashcode, t);
7905 }
7906
7907 if (TYPE_CANONICAL (t) == t)
7908 {
7909 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7910 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7911 SET_TYPE_STRUCTURAL_EQUALITY (t);
7912 else if (TYPE_CANONICAL (elt_type) != elt_type
7913 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7914 TYPE_CANONICAL (t)
7915 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7916 index_type
7917 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7918 shared);
7919 }
7920
7921 return t;
7922 }
7923
7924 /* Wrapper around build_array_type_1 with SHARED set to true. */
7925
7926 tree
7927 build_array_type (tree elt_type, tree index_type)
7928 {
7929 return build_array_type_1 (elt_type, index_type, true);
7930 }
7931
7932 /* Wrapper around build_array_type_1 with SHARED set to false. */
7933
7934 tree
7935 build_nonshared_array_type (tree elt_type, tree index_type)
7936 {
7937 return build_array_type_1 (elt_type, index_type, false);
7938 }
7939
7940 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7941 sizetype. */
7942
7943 tree
7944 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7945 {
7946 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7947 }
7948
7949 /* Strip all enclosing ARRAY_TYPEs from TYPE and return the innermost
7950 non-array element type. */
7951
7952 tree
7953 strip_array_types (tree type)
7954 {
7955 while (TREE_CODE (type) == ARRAY_TYPE)
7956 type = TREE_TYPE (type);
7957
7958 return type;
7959 }
7960
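/* Illustrative sketch, not part of the original tree.c: building "int[10]"
   with build_array_type_nelts and recovering the element type again with
   strip_array_types, even through nested arrays.  The function name is
   hypothetical.  */

static tree
example_array_type_sketch (void)
{
  tree a10 = build_array_type_nelts (integer_type_node, 10);  /* int[10]    */
  tree a5x10 = build_array_type_nelts (a10, 5);               /* int[5][10] */
  gcc_assert (strip_array_types (a5x10) == integer_type_node);
  return a5x10;
}
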
7961 /* Computes the canonical argument types from the argument type list
7962 ARGTYPES.
7963
7964 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7965 on entry to this function, or if any of the ARGTYPES are
7966 structural.
7967
7968 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7969 true on entry to this function, or if any of the ARGTYPES are
7970 non-canonical.
7971
7972 Returns a canonical argument list, which may be ARGTYPES when the
7973 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7974 true) or would not differ from ARGTYPES. */
7975
7976 static tree
7977 maybe_canonicalize_argtypes (tree argtypes,
7978 bool *any_structural_p,
7979 bool *any_noncanonical_p)
7980 {
7981 tree arg;
7982 bool any_noncanonical_argtypes_p = false;
7983
7984 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7985 {
7986 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7987 /* Fail gracefully by stating that the type is structural. */
7988 *any_structural_p = true;
7989 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7990 *any_structural_p = true;
7991 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7992 || TREE_PURPOSE (arg))
7993 /* If the argument has a default argument, we consider it
7994 non-canonical even though the type itself is canonical.
7995 That way, different variants of function and method types
7996 with default arguments will all point to the variant with
7997 no defaults as their canonical type. */
7998 any_noncanonical_argtypes_p = true;
7999 }
8000
8001 if (*any_structural_p)
8002 return argtypes;
8003
8004 if (any_noncanonical_argtypes_p)
8005 {
8006 /* Build the canonical list of argument types. */
8007 tree canon_argtypes = NULL_TREE;
8008 bool is_void = false;
8009
8010 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8011 {
8012 if (arg == void_list_node)
8013 is_void = true;
8014 else
8015 canon_argtypes = tree_cons (NULL_TREE,
8016 TYPE_CANONICAL (TREE_VALUE (arg)),
8017 canon_argtypes);
8018 }
8019
8020 canon_argtypes = nreverse (canon_argtypes);
8021 if (is_void)
8022 canon_argtypes = chainon (canon_argtypes, void_list_node);
8023
8024 /* There is a non-canonical type. */
8025 *any_noncanonical_p = true;
8026 return canon_argtypes;
8027 }
8028
8029 /* The canonical argument types are the same as ARGTYPES. */
8030 return argtypes;
8031 }
8032
8033 /* Construct, lay out and return
8034 the type of functions returning type VALUE_TYPE
8035 given arguments of types ARG_TYPES.
8036 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8037 are data type nodes for the arguments of the function.
8038 If such a type has already been constructed, reuse it. */
8039
8040 tree
8041 build_function_type (tree value_type, tree arg_types)
8042 {
8043 tree t;
8044 hashval_t hashcode = 0;
8045 bool any_structural_p, any_noncanonical_p;
8046 tree canon_argtypes;
8047
8048 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8049 {
8050 error ("function return type cannot be function");
8051 value_type = integer_type_node;
8052 }
8053
8054 /* Make a node of the sort we want. */
8055 t = make_node (FUNCTION_TYPE);
8056 TREE_TYPE (t) = value_type;
8057 TYPE_ARG_TYPES (t) = arg_types;
8058
8059 /* If we already have such a type, use the old one. */
8060 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
8061 hashcode = type_hash_list (arg_types, hashcode);
8062 t = type_hash_canon (hashcode, t);
8063
8064 /* Set up the canonical type. */
8065 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8066 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8067 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8068 &any_structural_p,
8069 &any_noncanonical_p);
8070 if (any_structural_p)
8071 SET_TYPE_STRUCTURAL_EQUALITY (t);
8072 else if (any_noncanonical_p)
8073 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8074 canon_argtypes);
8075
8076 if (!COMPLETE_TYPE_P (t))
8077 layout_type (t);
8078 return t;
8079 }
8080
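/* Illustrative sketch, not part of the original tree.c: ARG_TYPES for
   build_function_type is a TREE_LIST chain terminated by void_list_node for
   a prototype with a fixed argument list.  This builds the type of
   "int (double)"; the function name is hypothetical.  */

static tree
example_function_type_sketch (void)
{
  tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
  return build_function_type (integer_type_node, args);
}
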
8081 /* Build a function type. The RETURN_TYPE is the type returned by the
8082 function. If VAARGS is set, no void_type_node is appended to
8083 the list. ARGP must always be terminated by a NULL_TREE. */
8084
8085 static tree
8086 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8087 {
8088 tree t, args, last;
8089
8090 t = va_arg (argp, tree);
8091 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8092 args = tree_cons (NULL_TREE, t, args);
8093
8094 if (vaargs)
8095 {
8096 last = args;
8097 if (args != NULL_TREE)
8098 args = nreverse (args);
8099 gcc_assert (last != void_list_node);
8100 }
8101 else if (args == NULL_TREE)
8102 args = void_list_node;
8103 else
8104 {
8105 last = args;
8106 args = nreverse (args);
8107 TREE_CHAIN (last) = void_list_node;
8108 }
8109 args = build_function_type (return_type, args);
8110
8111 return args;
8112 }
8113
8114 /* Build a function type. The RETURN_TYPE is the type returned by the
8115 function. If additional arguments are provided, they are
8116 additional argument types. The list of argument types must always
8117 be terminated by NULL_TREE. */
8118
8119 tree
8120 build_function_type_list (tree return_type, ...)
8121 {
8122 tree args;
8123 va_list p;
8124
8125 va_start (p, return_type);
8126 args = build_function_type_list_1 (false, return_type, p);
8127 va_end (p);
8128 return args;
8129 }
8130
8131 /* Build a variable argument function type. The RETURN_TYPE is the
8132 type returned by the function. If additional arguments are provided,
8133 they are additional argument types. The list of argument types must
8134 always be terminated by NULL_TREE. */
8135
8136 tree
8137 build_varargs_function_type_list (tree return_type, ...)
8138 {
8139 tree args;
8140 va_list p;
8141
8142 va_start (p, return_type);
8143 args = build_function_type_list_1 (true, return_type, p);
8144 va_end (p);
8145
8146 return args;
8147 }
8148
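/* Illustrative sketch, not part of the original tree.c: the variadic helpers
   above spare callers from building the TREE_LIST by hand.  The first call
   builds the type of "int (char *, double)", the second a printf-like
   "int (char *, ...)"; the function name is hypothetical.  */

static void
example_function_type_list_sketch (void)
{
  tree fixed = build_function_type_list (integer_type_node,
                                         build_pointer_type (char_type_node),
                                         double_type_node, NULL_TREE);
  tree vararg = build_varargs_function_type_list (integer_type_node,
                                                  build_pointer_type (char_type_node),
                                                  NULL_TREE);
  gcc_assert (fixed != vararg);
}
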
8149 /* Build a function type. RETURN_TYPE is the type returned by the
8150 function; VAARGS indicates whether the function takes varargs. The
8151 function takes N named arguments, the types of which are provided in
8152 ARG_TYPES. */
8153
8154 static tree
8155 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8156 tree *arg_types)
8157 {
8158 int i;
8159 tree t = vaargs ? NULL_TREE : void_list_node;
8160
8161 for (i = n - 1; i >= 0; i--)
8162 t = tree_cons (NULL_TREE, arg_types[i], t);
8163
8164 return build_function_type (return_type, t);
8165 }
8166
8167 /* Build a function type. RETURN_TYPE is the type returned by the
8168 function. The function takes N named arguments, the types of which
8169 are provided in ARG_TYPES. */
8170
8171 tree
8172 build_function_type_array (tree return_type, int n, tree *arg_types)
8173 {
8174 return build_function_type_array_1 (false, return_type, n, arg_types);
8175 }
8176
8177 /* Build a variable argument function type. RETURN_TYPE is the type
8178 returned by the function. The function takes N named arguments, the
8179 types of which are provided in ARG_TYPES. */
8180
8181 tree
8182 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8183 {
8184 return build_function_type_array_1 (true, return_type, n, arg_types);
8185 }
8186
8187 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8188 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8189 for the method. An implicit additional parameter (of type
8190 pointer-to-BASETYPE) is added to the ARGTYPES. */
8191
8192 tree
8193 build_method_type_directly (tree basetype,
8194 tree rettype,
8195 tree argtypes)
8196 {
8197 tree t;
8198 tree ptype;
8199 int hashcode = 0;
8200 bool any_structural_p, any_noncanonical_p;
8201 tree canon_argtypes;
8202
8203 /* Make a node of the sort we want. */
8204 t = make_node (METHOD_TYPE);
8205
8206 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8207 TREE_TYPE (t) = rettype;
8208 ptype = build_pointer_type (basetype);
8209
8210 /* The actual arglist for this function includes a "hidden" argument
8211 which is "this". Put it into the list of argument types. */
8212 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8213 TYPE_ARG_TYPES (t) = argtypes;
8214
8215 /* If we already have such a type, use the old one. */
8216 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8217 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8218 hashcode = type_hash_list (argtypes, hashcode);
8219 t = type_hash_canon (hashcode, t);
8220
8221 /* Set up the canonical type. */
8222 any_structural_p
8223 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8224 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8225 any_noncanonical_p
8226 = (TYPE_CANONICAL (basetype) != basetype
8227 || TYPE_CANONICAL (rettype) != rettype);
8228 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8229 &any_structural_p,
8230 &any_noncanonical_p);
8231 if (any_structural_p)
8232 SET_TYPE_STRUCTURAL_EQUALITY (t);
8233 else if (any_noncanonical_p)
8234 TYPE_CANONICAL (t)
8235 = build_method_type_directly (TYPE_CANONICAL (basetype),
8236 TYPE_CANONICAL (rettype),
8237 canon_argtypes);
8238 if (!COMPLETE_TYPE_P (t))
8239 layout_type (t);
8240
8241 return t;
8242 }
8243
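/* Illustrative sketch, not part of the original tree.c: given some
   RECORD_TYPE KLASS, this builds the METHOD_TYPE of a member function
   "void KLASS::f ()"; the hidden "this" argument (of type KLASS *) is
   prepended by build_method_type_directly itself.  The function name is
   hypothetical.  */

static tree
example_method_type_sketch (tree klass)
{
  return build_method_type_directly (klass, void_type_node, void_list_node);
}
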
8244 /* Construct, lay out and return the type of methods belonging to class
8245 BASETYPE and whose arguments and values are described by TYPE.
8246 If that type exists already, reuse it.
8247 TYPE must be a FUNCTION_TYPE node. */
8248
8249 tree
8250 build_method_type (tree basetype, tree type)
8251 {
8252 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8253
8254 return build_method_type_directly (basetype,
8255 TREE_TYPE (type),
8256 TYPE_ARG_TYPES (type));
8257 }
8258
8259 /* Construct, lay out and return the type of offsets to a value
8260 of type TYPE, within an object of type BASETYPE.
8261 If a suitable offset type exists already, reuse it. */
8262
8263 tree
8264 build_offset_type (tree basetype, tree type)
8265 {
8266 tree t;
8267 hashval_t hashcode = 0;
8268
8269 /* Make a node of the sort we want. */
8270 t = make_node (OFFSET_TYPE);
8271
8272 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8273 TREE_TYPE (t) = type;
8274
8275 /* If we already have such a type, use the old one. */
8276 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8277 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8278 t = type_hash_canon (hashcode, t);
8279
8280 if (!COMPLETE_TYPE_P (t))
8281 layout_type (t);
8282
8283 if (TYPE_CANONICAL (t) == t)
8284 {
8285 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8286 || TYPE_STRUCTURAL_EQUALITY_P (type))
8287 SET_TYPE_STRUCTURAL_EQUALITY (t);
8288 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8289 || TYPE_CANONICAL (type) != type)
8290 TYPE_CANONICAL (t)
8291 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8292 TYPE_CANONICAL (type));
8293 }
8294
8295 return t;
8296 }
8297
8298 /* Create a complex type whose components are COMPONENT_TYPE. */
8299
8300 tree
8301 build_complex_type (tree component_type)
8302 {
8303 tree t;
8304 hashval_t hashcode;
8305
8306 gcc_assert (INTEGRAL_TYPE_P (component_type)
8307 || SCALAR_FLOAT_TYPE_P (component_type)
8308 || FIXED_POINT_TYPE_P (component_type));
8309
8310 /* Make a node of the sort we want. */
8311 t = make_node (COMPLEX_TYPE);
8312
8313 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8314
8315 /* If we already have such a type, use the old one. */
8316 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8317 t = type_hash_canon (hashcode, t);
8318
8319 if (!COMPLETE_TYPE_P (t))
8320 layout_type (t);
8321
8322 if (TYPE_CANONICAL (t) == t)
8323 {
8324 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8325 SET_TYPE_STRUCTURAL_EQUALITY (t);
8326 else if (TYPE_CANONICAL (component_type) != component_type)
8327 TYPE_CANONICAL (t)
8328 = build_complex_type (TYPE_CANONICAL (component_type));
8329 }
8330
8331 /* We need to create a name, since complex is a fundamental type. */
8332 if (! TYPE_NAME (t))
8333 {
8334 const char *name;
8335 if (component_type == char_type_node)
8336 name = "complex char";
8337 else if (component_type == signed_char_type_node)
8338 name = "complex signed char";
8339 else if (component_type == unsigned_char_type_node)
8340 name = "complex unsigned char";
8341 else if (component_type == short_integer_type_node)
8342 name = "complex short int";
8343 else if (component_type == short_unsigned_type_node)
8344 name = "complex short unsigned int";
8345 else if (component_type == integer_type_node)
8346 name = "complex int";
8347 else if (component_type == unsigned_type_node)
8348 name = "complex unsigned int";
8349 else if (component_type == long_integer_type_node)
8350 name = "complex long int";
8351 else if (component_type == long_unsigned_type_node)
8352 name = "complex long unsigned int";
8353 else if (component_type == long_long_integer_type_node)
8354 name = "complex long long int";
8355 else if (component_type == long_long_unsigned_type_node)
8356 name = "complex long long unsigned int";
8357 else
8358 name = 0;
8359
8360 if (name != 0)
8361 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8362 get_identifier (name), t);
8363 }
8364
8365 return build_qualified_type (t, TYPE_QUALS (component_type));
8366 }
8367
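/* Illustrative sketch, not part of the original tree.c: the complex type
   whose components are "double".  Because the component is already
   canonical, the result is shared with any previously constructed node for
   the same component type.  The function name is hypothetical.  */

static tree
example_complex_type_sketch (void)
{
  tree c = build_complex_type (double_type_node);
  gcc_assert (TREE_TYPE (c) == double_type_node);
  return c;
}
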
8368 /* If TYPE is a real or complex floating-point type and the target
8369 does not directly support arithmetic on TYPE then return the wider
8370 type to be used for arithmetic on TYPE. Otherwise, return
8371 NULL_TREE. */
8372
8373 tree
8374 excess_precision_type (tree type)
8375 {
8376 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8377 {
8378 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8379 switch (TREE_CODE (type))
8380 {
8381 case REAL_TYPE:
8382 switch (flt_eval_method)
8383 {
8384 case 1:
8385 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8386 return double_type_node;
8387 break;
8388 case 2:
8389 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8390 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8391 return long_double_type_node;
8392 break;
8393 default:
8394 gcc_unreachable ();
8395 }
8396 break;
8397 case COMPLEX_TYPE:
8398 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8399 return NULL_TREE;
8400 switch (flt_eval_method)
8401 {
8402 case 1:
8403 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8404 return complex_double_type_node;
8405 break;
8406 case 2:
8407 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8408 || (TYPE_MODE (TREE_TYPE (type))
8409 == TYPE_MODE (double_type_node)))
8410 return complex_long_double_type_node;
8411 break;
8412 default:
8413 gcc_unreachable ();
8414 }
8415 break;
8416 default:
8417 break;
8418 }
8419 }
8420 return NULL_TREE;
8421 }
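
/* Illustrative sketch, not part of the original tree.c: callers typically
   fall back to the original type when no wider arithmetic type is needed,
   as below.  Whether float promotes to double or long double depends on the
   target's FLT_EVAL_METHOD.  The function name is hypothetical.  */

static tree
example_arithmetic_type_sketch (tree type)
{
  tree wider = excess_precision_type (type);
  return wider ? wider : type;
}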
8422 \f
8423 /* Return OP, stripped of any conversions to wider types as much as is safe.
8424 Converting the value back to OP's type makes a value equivalent to OP.
8425
8426 If FOR_TYPE is nonzero, we return a value which, if converted to
8427 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8428
8429 OP must have integer, real or enumeral type. Pointers are not allowed!
8430
8431 There are some cases where the obvious value we could return
8432 would regenerate to OP if converted to OP's type,
8433 but would not extend like OP to wider types.
8434 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8435 For example, if OP is (unsigned short)(signed char)-1,
8436 we avoid returning (signed char)-1 if FOR_TYPE is int,
8437 even though extending that to an unsigned short would regenerate OP,
8438 since the result of extending (signed char)-1 to (int)
8439 is different from (int) OP. */
8440
8441 tree
8442 get_unwidened (tree op, tree for_type)
8443 {
8444 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8445 tree type = TREE_TYPE (op);
8446 unsigned final_prec
8447 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8448 int uns
8449 = (for_type != 0 && for_type != type
8450 && final_prec > TYPE_PRECISION (type)
8451 && TYPE_UNSIGNED (type));
8452 tree win = op;
8453
8454 while (CONVERT_EXPR_P (op))
8455 {
8456 int bitschange;
8457
8458 /* TYPE_PRECISION on vector types has different meaning
8459 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8460 so avoid them here. */
8461 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8462 break;
8463
8464 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8465 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8466
8467 /* Truncations are many-one so cannot be removed.
8468 Unless we are later going to truncate down even farther. */
8469 if (bitschange < 0
8470 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8471 break;
8472
8473 /* See what's inside this conversion. If we decide to strip it,
8474 we will set WIN. */
8475 op = TREE_OPERAND (op, 0);
8476
8477 /* If we have not stripped any zero-extensions (uns is 0),
8478 we can strip any kind of extension.
8479 If we have previously stripped a zero-extension,
8480 only zero-extensions can safely be stripped.
8481 Any extension can be stripped if the bits it would produce
8482 are all going to be discarded later by truncating to FOR_TYPE. */
8483
8484 if (bitschange > 0)
8485 {
8486 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8487 win = op;
8488 /* TYPE_UNSIGNED says whether this is a zero-extension.
8489 Let's avoid computing it if it does not affect WIN
8490 and if UNS will not be needed again. */
8491 if ((uns
8492 || CONVERT_EXPR_P (op))
8493 && TYPE_UNSIGNED (TREE_TYPE (op)))
8494 {
8495 uns = 1;
8496 win = op;
8497 }
8498 }
8499 }
8500
8501 /* If we finally reach a constant, see if it fits in for_type and
8502 in that case convert it. */
8503 if (for_type
8504 && TREE_CODE (win) == INTEGER_CST
8505 && TREE_TYPE (win) != for_type
8506 && int_fits_type_p (win, for_type))
8507 win = fold_convert (for_type, win);
8508
8509 return win;
8510 }
8511 \f
8512 /* Return OP or a simpler expression for a narrower value
8513 which can be sign-extended or zero-extended to give back OP.
8514 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8515 or 0 if the value should be sign-extended. */
8516
8517 tree
8518 get_narrower (tree op, int *unsignedp_ptr)
8519 {
8520 int uns = 0;
8521 int first = 1;
8522 tree win = op;
8523 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8524
8525 while (TREE_CODE (op) == NOP_EXPR)
8526 {
8527 int bitschange
8528 = (TYPE_PRECISION (TREE_TYPE (op))
8529 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8530
8531 /* Truncations are many-one so cannot be removed. */
8532 if (bitschange < 0)
8533 break;
8534
8535 /* See what's inside this conversion. If we decide to strip it,
8536 we will set WIN. */
8537
8538 if (bitschange > 0)
8539 {
8540 op = TREE_OPERAND (op, 0);
8541 /* An extension: the outermost one can be stripped,
8542 but remember whether it is zero or sign extension. */
8543 if (first)
8544 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8545 /* Otherwise, if a sign extension has been stripped,
8546 only sign extensions can now be stripped;
8547 if a zero extension has been stripped, only zero-extensions. */
8548 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8549 break;
8550 first = 0;
8551 }
8552 else /* bitschange == 0 */
8553 {
8554 /* A change in nominal type can always be stripped, but we must
8555 preserve the unsignedness. */
8556 if (first)
8557 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8558 first = 0;
8559 op = TREE_OPERAND (op, 0);
8560 /* Keep trying to narrow, but don't assign op to win if it
8561 would turn an integral type into something else. */
8562 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8563 continue;
8564 }
8565
8566 win = op;
8567 }
8568
8569 if (TREE_CODE (op) == COMPONENT_REF
8570 /* Since type_for_size always gives an integer type. */
8571 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8572 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8573 /* Ensure field is laid out already. */
8574 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8575 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8576 {
8577 unsigned HOST_WIDE_INT innerprec
8578 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8579 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8580 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8581 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8582
8583 /* We can get this structure field in a narrower type that fits it,
8584 but the resulting extension to its nominal type (a fullword type)
8585 must satisfy the same conditions as for other extensions.
8586
8587 Do this only for fields that are aligned (not bit-fields),
8588 because when bit-field insns will be used there is no
8589 advantage in doing this. */
8590
8591 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8592 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8593 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8594 && type != 0)
8595 {
8596 if (first)
8597 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8598 win = fold_convert (type, op);
8599 }
8600 }
8601
8602 *unsignedp_ptr = uns;
8603 return win;
8604 }
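
/* Illustrative sketch, not part of the original tree.c: narrowing an operand
   before expanding an arithmetic expression.  For OP equal to "(int) c" with
   C of type unsigned char, the widening NOP_EXPR is stripped, *NARROW is set
   to C and the result is true (zero-extension).  The function name is
   hypothetical.  */

static bool
example_is_zero_extended_sketch (tree op, tree *narrow)
{
  int uns;
  *narrow = get_narrower (op, &uns);
  return uns != 0;
}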
8605 \f
8606 /* Returns true if integer constant C has a value that is permissible
8607 for type TYPE (an INTEGER_TYPE). */
8608
8609 bool
8610 int_fits_type_p (const_tree c, const_tree type)
8611 {
8612 tree type_low_bound, type_high_bound;
8613 bool ok_for_low_bound, ok_for_high_bound;
8614 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8615
8616 retry:
8617 type_low_bound = TYPE_MIN_VALUE (type);
8618 type_high_bound = TYPE_MAX_VALUE (type);
8619
8620 /* If at least one bound of the type is a constant integer, we can check
8621 ourselves and maybe make a decision. If no such decision is possible, but
8622 this type is a subtype, try checking against that. Otherwise, use
8623 fits_to_tree_p, which checks against the precision.
8624
8625 Compute the status for each possibly constant bound, and return if we see
8626 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
8627 true only when the corresponding bound is a known constant and the constant
8628 is known to fit it; otherwise it is left false. */
8629
8630 /* Check if c >= type_low_bound. */
8631 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8632 {
8633 if (tree_int_cst_lt (c, type_low_bound))
8634 return false;
8635 ok_for_low_bound = true;
8636 }
8637 else
8638 ok_for_low_bound = false;
8639
8640 /* Check if c <= type_high_bound. */
8641 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8642 {
8643 if (tree_int_cst_lt (type_high_bound, c))
8644 return false;
8645 ok_for_high_bound = true;
8646 }
8647 else
8648 ok_for_high_bound = false;
8649
8650 /* If the constant fits both bounds, the result is known. */
8651 if (ok_for_low_bound && ok_for_high_bound)
8652 return true;
8653
8654 /* Perform some generic filtering which may allow making a decision
8655 even if the bounds are not constant. First, negative integers
8656 never fit in unsigned types. */
8657 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8658 return false;
8659
8660 /* Second, narrower types always fit in wider ones. */
8661 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8662 return true;
8663
8664 /* Third, unsigned integers with top bit set never fit signed types. */
8665 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8666 {
8667 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8668 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8669 {
8670 /* When a tree_cst is converted to a wide-int, the precision
8671 is taken from the type. However, if the precision of the
8672 mode underneath the type is smaller than that, it is
8673 possible that the value will not fit. The test below
8674 fails if any bit is set between the sign bit of the
8675 underlying mode and the top bit of the type. */
8676 if (wi::ne_p (wi::zext (c, prec - 1), c))
8677 return false;
8678 }
8679 else if (wi::neg_p (c))
8680 return false;
8681 }
8682
8683 /* If we haven't been able to decide at this point, there is nothing more we
8684 can check ourselves here. Look at the base type if we have one and it
8685 has the same precision. */
8686 if (TREE_CODE (type) == INTEGER_TYPE
8687 && TREE_TYPE (type) != 0
8688 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8689 {
8690 type = TREE_TYPE (type);
8691 goto retry;
8692 }
8693
8694 /* Or to fits_to_tree_p, if nothing else. */
8695 return wi::fits_to_tree_p (c, type);
8696 }
8697
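/* Illustrative sketch, not part of the original tree.c: 300 does not fit in
   "unsigned char" but does fit in "int", so a front end can use
   int_fits_type_p to decide whether a constant needs a diagnostic or a
   truncation.  The function name is hypothetical.  */

static void
example_fits_sketch (void)
{
  tree c = build_int_cst (integer_type_node, 300);
  gcc_assert (!int_fits_type_p (c, unsigned_char_type_node));
  gcc_assert (int_fits_type_p (c, integer_type_node));
}
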
8698 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8699 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8700 represented (assuming two's-complement arithmetic) within the bit
8701 precision of the type are returned instead. */
8702
8703 void
8704 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8705 {
8706 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8707 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8708 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8709 else
8710 {
8711 if (TYPE_UNSIGNED (type))
8712 mpz_set_ui (min, 0);
8713 else
8714 {
8715 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8716 wi::to_mpz (mn, min, SIGNED);
8717 }
8718 }
8719
8720 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8721 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8722 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8723 else
8724 {
8725 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8726 wi::to_mpz (mn, max, TYPE_SIGN (type));
8727 }
8728 }
8729
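/* Illustrative sketch, not part of the original tree.c: typical use from a
   loop or range analysis, reading both bounds into GMP values.  The function
   name is hypothetical; callers own the mpz_t initialization and cleanup.  */

static void
example_static_bounds_sketch (const_tree type)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  /* ... use LO and HI ... */
  mpz_clear (lo);
  mpz_clear (hi);
}
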
8730 /* Return true if VAR is an automatic variable defined in function FN. */
8731
8732 bool
8733 auto_var_in_fn_p (const_tree var, const_tree fn)
8734 {
8735 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8736 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8737 || TREE_CODE (var) == PARM_DECL)
8738 && ! TREE_STATIC (var))
8739 || TREE_CODE (var) == LABEL_DECL
8740 || TREE_CODE (var) == RESULT_DECL));
8741 }
8742
8743 /* Subprogram of following function. Called by walk_tree.
8744
8745 Return *TP if it is an automatic variable or parameter of the
8746 function passed in as DATA. */
8747
8748 static tree
8749 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8750 {
8751 tree fn = (tree) data;
8752
8753 if (TYPE_P (*tp))
8754 *walk_subtrees = 0;
8755
8756 else if (DECL_P (*tp)
8757 && auto_var_in_fn_p (*tp, fn))
8758 return *tp;
8759
8760 return NULL_TREE;
8761 }
8762
8763 /* Returns true if T is, contains, or refers to a type with variable
8764 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8765 arguments, but not the return type. If FN is nonzero, only return
8766 true if a modifier of the type or position of FN is a variable or
8767 parameter inside FN.
8768
8769 This concept is more general than that of C99 'variably modified types':
8770 in C99, a struct type is never variably modified because a VLA may not
8771 appear as a structure member. However, in GNU C, code like:
8772
8773 struct S { int i[f()]; };
8774
8775 is valid, and other languages may define similar constructs. */
8776
8777 bool
8778 variably_modified_type_p (tree type, tree fn)
8779 {
8780 tree t;
8781
8782 /* Test if T is either variable (if FN is zero) or an expression containing
8783 a variable in FN. If TYPE isn't gimplified, return true also if
8784 gimplify_one_sizepos would gimplify the expression into a local
8785 variable. */
8786 #define RETURN_TRUE_IF_VAR(T) \
8787 do { tree _t = (T); \
8788 if (_t != NULL_TREE \
8789 && _t != error_mark_node \
8790 && TREE_CODE (_t) != INTEGER_CST \
8791 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8792 && (!fn \
8793 || (!TYPE_SIZES_GIMPLIFIED (type) \
8794 && !is_gimple_sizepos (_t)) \
8795 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8796 return true; } while (0)
8797
8798 if (type == error_mark_node)
8799 return false;
8800
8801 /* If TYPE itself has variable size, it is variably modified. */
8802 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8803 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8804
8805 switch (TREE_CODE (type))
8806 {
8807 case POINTER_TYPE:
8808 case REFERENCE_TYPE:
8809 case VECTOR_TYPE:
8810 if (variably_modified_type_p (TREE_TYPE (type), fn))
8811 return true;
8812 break;
8813
8814 case FUNCTION_TYPE:
8815 case METHOD_TYPE:
8816 /* If TYPE is a function type, it is variably modified if the
8817 return type is variably modified. */
8818 if (variably_modified_type_p (TREE_TYPE (type), fn))
8819 return true;
8820 break;
8821
8822 case INTEGER_TYPE:
8823 case REAL_TYPE:
8824 case FIXED_POINT_TYPE:
8825 case ENUMERAL_TYPE:
8826 case BOOLEAN_TYPE:
8827 /* Scalar types are variably modified if their end points
8828 aren't constant. */
8829 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8830 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8831 break;
8832
8833 case RECORD_TYPE:
8834 case UNION_TYPE:
8835 case QUAL_UNION_TYPE:
8836 /* We can't see if any of the fields are variably-modified by the
8837 definition we normally use, since that would produce infinite
8838 recursion via pointers. */
8839 /* This is variably modified if some field's type is. */
8840 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8841 if (TREE_CODE (t) == FIELD_DECL)
8842 {
8843 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8844 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8845 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8846
8847 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8848 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8849 }
8850 break;
8851
8852 case ARRAY_TYPE:
8853 /* Do not call ourselves to avoid infinite recursion. This is
8854 variably modified if the element type is. */
8855 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8856 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8857 break;
8858
8859 default:
8860 break;
8861 }
8862
8863 /* The current language may have other cases to check, but in general,
8864 all other types are not variably modified. */
8865 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8866
8867 #undef RETURN_TRUE_IF_VAR
8868 }
8869
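/* Illustrative sketch, not part of the original tree.c: for C code such as
   "void f (int n) { int a[n]; ... }", the type of A is variably modified
   inside F, so passing FN restricts the answer to sizes that really depend
   on F's variables or parameters.  Names are hypothetical.  */

static bool
example_needs_runtime_size_sketch (tree decl, tree fn)
{
  return variably_modified_type_p (TREE_TYPE (decl), fn);
}
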
8870 /* Given a DECL or TYPE, return the scope in which it was declared, or
8871 NULL_TREE if there is no containing scope. */
8872
8873 tree
8874 get_containing_scope (const_tree t)
8875 {
8876 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8877 }
8878
8879 /* Return the innermost context enclosing DECL that is
8880 a FUNCTION_DECL, or zero if none. */
8881
8882 tree
8883 decl_function_context (const_tree decl)
8884 {
8885 tree context;
8886
8887 if (TREE_CODE (decl) == ERROR_MARK)
8888 return 0;
8889
8890 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8891 where we look up the function at runtime. Such functions always take
8892 a first argument of type 'pointer to real context'.
8893
8894 C++ should really be fixed to use DECL_CONTEXT for the real context,
8895 and use something else for the "virtual context". */
8896 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8897 context
8898 = TYPE_MAIN_VARIANT
8899 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8900 else
8901 context = DECL_CONTEXT (decl);
8902
8903 while (context && TREE_CODE (context) != FUNCTION_DECL)
8904 {
8905 if (TREE_CODE (context) == BLOCK)
8906 context = BLOCK_SUPERCONTEXT (context);
8907 else
8908 context = get_containing_scope (context);
8909 }
8910
8911 return context;
8912 }
8913
8914 /* Return the innermost context enclosing DECL that is
8915 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8916 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8917
8918 tree
8919 decl_type_context (const_tree decl)
8920 {
8921 tree context = DECL_CONTEXT (decl);
8922
8923 while (context)
8924 switch (TREE_CODE (context))
8925 {
8926 case NAMESPACE_DECL:
8927 case TRANSLATION_UNIT_DECL:
8928 return NULL_TREE;
8929
8930 case RECORD_TYPE:
8931 case UNION_TYPE:
8932 case QUAL_UNION_TYPE:
8933 return context;
8934
8935 case TYPE_DECL:
8936 case FUNCTION_DECL:
8937 context = DECL_CONTEXT (context);
8938 break;
8939
8940 case BLOCK:
8941 context = BLOCK_SUPERCONTEXT (context);
8942 break;
8943
8944 default:
8945 gcc_unreachable ();
8946 }
8947
8948 return NULL_TREE;
8949 }
8950
8951 /* CALL is a CALL_EXPR. Return the declaration for the function
8952 called, or NULL_TREE if the called function cannot be
8953 determined. */
8954
8955 tree
8956 get_callee_fndecl (const_tree call)
8957 {
8958 tree addr;
8959
8960 if (call == error_mark_node)
8961 return error_mark_node;
8962
8963 /* It's invalid to call this function with anything but a
8964 CALL_EXPR. */
8965 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8966
8967 /* The first operand to the CALL is the address of the function
8968 called. */
8969 addr = CALL_EXPR_FN (call);
8970
8971 /* If there is no function, return early. */
8972 if (addr == NULL_TREE)
8973 return NULL_TREE;
8974
8975 STRIP_NOPS (addr);
8976
8977 /* If this is a readonly function pointer, extract its initial value. */
8978 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8979 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8980 && DECL_INITIAL (addr))
8981 addr = DECL_INITIAL (addr);
8982
8983 /* If the address is just `&f' for some function `f', then we know
8984 that `f' is being called. */
8985 if (TREE_CODE (addr) == ADDR_EXPR
8986 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8987 return TREE_OPERAND (addr, 0);
8988
8989 /* We couldn't figure out what was being called. */
8990 return NULL_TREE;
8991 }
8992
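/* Illustrative sketch, not part of the original tree.c: a direct call such
   as "f (x)" has CALL_EXPR_FN equal to &f, so get_callee_fndecl recovers the
   FUNCTION_DECL; for an indirect call through an arbitrary pointer it
   returns NULL_TREE.  The function name is hypothetical.  */

static bool
example_is_direct_call_sketch (const_tree call)
{
  return get_callee_fndecl (call) != NULL_TREE;
}
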
8993 /* Print debugging information about tree nodes generated during the compile,
8994 and any language-specific information. */
8995
8996 void
8997 dump_tree_statistics (void)
8998 {
8999 if (GATHER_STATISTICS)
9000 {
9001 int i;
9002 int total_nodes, total_bytes;
9003 fprintf (stderr, "Kind Nodes Bytes\n");
9004 fprintf (stderr, "---------------------------------------\n");
9005 total_nodes = total_bytes = 0;
9006 for (i = 0; i < (int) all_kinds; i++)
9007 {
9008 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9009 tree_node_counts[i], tree_node_sizes[i]);
9010 total_nodes += tree_node_counts[i];
9011 total_bytes += tree_node_sizes[i];
9012 }
9013 fprintf (stderr, "---------------------------------------\n");
9014 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9015 fprintf (stderr, "---------------------------------------\n");
9016 fprintf (stderr, "Code Nodes\n");
9017 fprintf (stderr, "----------------------------\n");
9018 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9019 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9020 tree_code_counts[i]);
9021 fprintf (stderr, "----------------------------\n");
9022 ssanames_print_statistics ();
9023 phinodes_print_statistics ();
9024 }
9025 else
9026 fprintf (stderr, "(No per-node statistics)\n");
9027
9028 print_type_hash_statistics ();
9029 print_debug_expr_statistics ();
9030 print_value_expr_statistics ();
9031 lang_hooks.print_statistics ();
9032 }
9033 \f
9034 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9035
9036 /* Fold the high BITS bits of VALUE into the crc32 checksum CHKSUM. */
9037
9038 static unsigned
9039 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9040 {
9041 unsigned ix;
9042
9043 for (ix = bits; ix--; value <<= 1)
9044 {
9045 unsigned feedback;
9046
9047 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9048 chksum <<= 1;
9049 chksum ^= feedback;
9050 }
9051 return chksum;
9052 }
9053
9054 /* Generate a crc32 of a 32-bit unsigned. */
9055
9056 unsigned
9057 crc32_unsigned (unsigned chksum, unsigned value)
9058 {
9059 return crc32_unsigned_bits (chksum, value, 32);
9060 }
9061
9062 /* Generate a crc32 of a byte. */
9063
9064 unsigned
9065 crc32_byte (unsigned chksum, char byte)
9066 {
9067 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9068 }
9069
9070 /* Generate a crc32 of a string. */
9071
9072 unsigned
9073 crc32_string (unsigned chksum, const char *string)
9074 {
9075 do
9076 {
9077 chksum = crc32_byte (chksum, *string);
9078 }
9079 while (*string++);
9080 return chksum;
9081 }
9082
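/* Illustrative sketch, not part of the original tree.c: checksums can be
   chained by feeding the running value back in, so a string and a 32-bit
   value can be mixed into a single crc.  The function name is
   hypothetical.  */

static unsigned
example_crc_sketch (const char *name, unsigned value)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_unsigned (chksum, value);
}
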
9083 /* P is a string that will be used in a symbol. Mask out any characters
9084 that are not valid in that context. */
9085
9086 void
9087 clean_symbol_name (char *p)
9088 {
9089 for (; *p; p++)
9090 if (! (ISALNUM (*p)
9091 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9092 || *p == '$'
9093 #endif
9094 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9095 || *p == '.'
9096 #endif
9097 ))
9098 *p = '_';
9099 }
9100
9101 /* Generate a name for a special-purpose function.
9102 The generated name may need to be unique across the whole link.
9103 Changes to this function may also require corresponding changes to
9104 xstrdup_mask_random.
9105 TYPE is some string to identify the purpose of this function to the
9106 linker or collect2; it must start with an uppercase letter,
9107 one of:
9108 I - for constructors
9109 D - for destructors
9110 N - for C++ anonymous namespaces
9111 F - for DWARF unwind frame information. */
9112
9113 tree
9114 get_file_function_name (const char *type)
9115 {
9116 char *buf;
9117 const char *p;
9118 char *q;
9119
9120 /* If we already have a name we know to be unique, just use that. */
9121 if (first_global_object_name)
9122 p = q = ASTRDUP (first_global_object_name);
9123 /* If the target is handling the constructors/destructors, they
9124 will be local to this file and the name is only necessary for
9125 debugging purposes.
9126 We also assign sub_I and sub_D suffixes to constructors called from
9127 the global static constructors. These are always local. */
9128 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9129 || (strncmp (type, "sub_", 4) == 0
9130 && (type[4] == 'I' || type[4] == 'D')))
9131 {
9132 const char *file = main_input_filename;
9133 if (! file)
9134 file = LOCATION_FILE (input_location);
9135 /* Just use the file's basename, because the full pathname
9136 might be quite long. */
9137 p = q = ASTRDUP (lbasename (file));
9138 }
9139 else
9140 {
9141 /* Otherwise, the name must be unique across the entire link.
9142 We don't have anything that we know to be unique to this translation
9143 unit, so use what we do have and throw in some randomness. */
9144 unsigned len;
9145 const char *name = weak_global_object_name;
9146 const char *file = main_input_filename;
9147
9148 if (! name)
9149 name = "";
9150 if (! file)
9151 file = LOCATION_FILE (input_location);
9152
9153 len = strlen (file);
9154 q = (char *) alloca (9 + 17 + len + 1);
9155 memcpy (q, file, len + 1);
9156
9157 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9158 crc32_string (0, name), get_random_seed (false));
9159
9160 p = q;
9161 }
9162
9163 clean_symbol_name (q);
9164 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9165 + strlen (type));
9166
9167 /* Set up the name of the file-level functions we may need.
9168 Use a global object (which is already required to be unique over
9169 the program) rather than the file name (which imposes extra
9170 constraints). */
9171 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9172
9173 return get_identifier (buf);
9174 }
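
/* Illustrative sketch, not part of the original tree.c: with
   FILE_FUNCTION_FORMAT defined above, a request for a global constructor
   name yields an identifier of the form "_GLOBAL__I_<name>", where <name>
   comes from the first global object, the input file name, or a randomized
   fallback.  The function name is hypothetical.  */

static tree
example_ctor_name_sketch (void)
{
  return get_file_function_name ("I");
}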
9175 \f
9176 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9177
9178 /* Complain that the tree code of NODE does not match the expected 0
9179 terminated list of trailing codes. The trailing code list can be
9180 empty, for a more vague error message. FILE, LINE, and FUNCTION
9181 are of the caller. */
9182
9183 void
9184 tree_check_failed (const_tree node, const char *file,
9185 int line, const char *function, ...)
9186 {
9187 va_list args;
9188 const char *buffer;
9189 unsigned length = 0;
9190 enum tree_code code;
9191
9192 va_start (args, function);
9193 while ((code = (enum tree_code) va_arg (args, int)))
9194 length += 4 + strlen (get_tree_code_name (code));
9195 va_end (args);
9196 if (length)
9197 {
9198 char *tmp;
9199 va_start (args, function);
9200 length += strlen ("expected ");
9201 buffer = tmp = (char *) alloca (length);
9202 length = 0;
9203 while ((code = (enum tree_code) va_arg (args, int)))
9204 {
9205 const char *prefix = length ? " or " : "expected ";
9206
9207 strcpy (tmp + length, prefix);
9208 length += strlen (prefix);
9209 strcpy (tmp + length, get_tree_code_name (code));
9210 length += strlen (get_tree_code_name (code));
9211 }
9212 va_end (args);
9213 }
9214 else
9215 buffer = "unexpected node";
9216
9217 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9218 buffer, get_tree_code_name (TREE_CODE (node)),
9219 function, trim_filename (file), line);
9220 }
9221
9222 /* Complain that the tree code of NODE matches one of the codes in the 0
9223 terminated list of trailing (excluded) codes. FILE, LINE, and FUNCTION are of
9224 the caller. */
9225
9226 void
9227 tree_not_check_failed (const_tree node, const char *file,
9228 int line, const char *function, ...)
9229 {
9230 va_list args;
9231 char *buffer;
9232 unsigned length = 0;
9233 enum tree_code code;
9234
9235 va_start (args, function);
9236 while ((code = (enum tree_code) va_arg (args, int)))
9237 length += 4 + strlen (get_tree_code_name (code));
9238 va_end (args);
9239 va_start (args, function);
9240 buffer = (char *) alloca (length);
9241 length = 0;
9242 while ((code = (enum tree_code) va_arg (args, int)))
9243 {
9244 if (length)
9245 {
9246 strcpy (buffer + length, " or ");
9247 length += 4;
9248 }
9249 strcpy (buffer + length, get_tree_code_name (code));
9250 length += strlen (get_tree_code_name (code));
9251 }
9252 va_end (args);
9253
9254 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9255 buffer, get_tree_code_name (TREE_CODE (node)),
9256 function, trim_filename (file), line);
9257 }
9258
9259 /* Similar to tree_check_failed, except that we check for a class of tree
9260 code, given in CL. */
9261
9262 void
9263 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9264 const char *file, int line, const char *function)
9265 {
9266 internal_error
9267 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9268 TREE_CODE_CLASS_STRING (cl),
9269 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9270 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9271 }
9272
9273 /* Similar to tree_check_failed, except that instead of specifying a
9274 dozen codes, use the knowledge that they're all sequential. */
9275
9276 void
9277 tree_range_check_failed (const_tree node, const char *file, int line,
9278 const char *function, enum tree_code c1,
9279 enum tree_code c2)
9280 {
9281 char *buffer;
9282 unsigned length = 0;
9283 unsigned int c;
9284
9285 for (c = c1; c <= c2; ++c)
9286 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9287
9288 length += strlen ("expected ");
9289 buffer = (char *) alloca (length);
9290 length = 0;
9291
9292 for (c = c1; c <= c2; ++c)
9293 {
9294 const char *prefix = length ? " or " : "expected ";
9295
9296 strcpy (buffer + length, prefix);
9297 length += strlen (prefix);
9298 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9299 length += strlen (get_tree_code_name ((enum tree_code) c));
9300 }
9301
9302 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9303 buffer, get_tree_code_name (TREE_CODE (node)),
9304 function, trim_filename (file), line);
9305 }
9306
9307
9308 /* Similar to tree_check_failed, except that we check that a tree does
9309 not have the specified code, given in CL. */
9310
9311 void
9312 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9313 const char *file, int line, const char *function)
9314 {
9315 internal_error
9316 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9317 TREE_CODE_CLASS_STRING (cl),
9318 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9319 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9320 }
9321
9322
9323 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9324
9325 void
9326 omp_clause_check_failed (const_tree node, const char *file, int line,
9327 const char *function, enum omp_clause_code code)
9328 {
9329 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9330 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9331 function, trim_filename (file), line);
9332 }
9333
9334
9335 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9336
9337 void
9338 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9339 const char *function, enum omp_clause_code c1,
9340 enum omp_clause_code c2)
9341 {
9342 char *buffer;
9343 unsigned length = 0;
9344 unsigned int c;
9345
9346 for (c = c1; c <= c2; ++c)
9347 length += 4 + strlen (omp_clause_code_name[c]);
9348
9349 length += strlen ("expected ");
9350 buffer = (char *) alloca (length);
9351 length = 0;
9352
9353 for (c = c1; c <= c2; ++c)
9354 {
9355 const char *prefix = length ? " or " : "expected ";
9356
9357 strcpy (buffer + length, prefix);
9358 length += strlen (prefix);
9359 strcpy (buffer + length, omp_clause_code_name[c]);
9360 length += strlen (omp_clause_code_name[c]);
9361 }
9362
9363 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9364 buffer, omp_clause_code_name[TREE_CODE (node)],
9365 function, trim_filename (file), line);
9366 }
9367
9368
9369 #undef DEFTREESTRUCT
9370 #define DEFTREESTRUCT(VAL, NAME) NAME,
9371
9372 static const char *ts_enum_names[] = {
9373 #include "treestruct.def"
9374 };
9375 #undef DEFTREESTRUCT
9376
9377 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9378
9379 /* Similar to tree_class_check_failed, except that we check for
9380 whether CODE contains the tree structure identified by EN. */
9381
9382 void
9383 tree_contains_struct_check_failed (const_tree node,
9384 const enum tree_node_structure_enum en,
9385 const char *file, int line,
9386 const char *function)
9387 {
9388 internal_error
9389 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9390 TS_ENUM_NAME (en),
9391 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9392 }
9393
9394
9395 /* Similar to above, except that the check is for the bounds of a TREE_INT_CST's
9396 (dynamically sized) vector. */
9397
9398 void
9399 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9400 const char *function)
9401 {
9402 internal_error
9403 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9404 idx + 1, len, function, trim_filename (file), line);
9405 }
9406
9407 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9408 (dynamically sized) vector. */
9409
9410 void
9411 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9412 const char *function)
9413 {
9414 internal_error
9415 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9416 idx + 1, len, function, trim_filename (file), line);
9417 }
9418
9419 /* Similar to above, except that the check is for the bounds of the operand
9420 vector of an expression node EXP. */
9421
9422 void
9423 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9424 int line, const char *function)
9425 {
9426 enum tree_code code = TREE_CODE (exp);
9427 internal_error
9428 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9429 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9430 function, trim_filename (file), line);
9431 }
9432
9433 /* Similar to above, except that the check is for the number of
9434 operands of an OMP_CLAUSE node. */
9435
9436 void
9437 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9438 int line, const char *function)
9439 {
9440 internal_error
9441 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9442 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9443 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9444 trim_filename (file), line);
9445 }
9446 #endif /* ENABLE_TREE_CHECKING */
9447 \f
9448 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9449 and mapped to the machine mode MODE. Initialize its fields and build
9450 the information necessary for debugging output. */
9451
9452 static tree
9453 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9454 {
9455 tree t;
9456 hashval_t hashcode = 0;
9457
9458 t = make_node (VECTOR_TYPE);
9459 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9460 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9461 SET_TYPE_MODE (t, mode);
9462
9463 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9464 SET_TYPE_STRUCTURAL_EQUALITY (t);
9465 else if (TYPE_CANONICAL (innertype) != innertype
9466 || mode != VOIDmode)
9467 TYPE_CANONICAL (t)
9468 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9469
9470 layout_type (t);
9471
9472 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9473 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9474 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9475 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9476 t = type_hash_canon (hashcode, t);
9477
9478 /* We have built a main variant, based on the main variant of the
9479 inner type. Use it to build the variant we return. */
9480 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9481 && TREE_TYPE (t) != innertype)
9482 return build_type_attribute_qual_variant (t,
9483 TYPE_ATTRIBUTES (innertype),
9484 TYPE_QUALS (innertype));
9485
9486 return t;
9487 }
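
/* An illustrative sketch (not from the GCC sources; values hypothetical):
   make_vector_type is static, so clients go through the wrappers defined
   further below.  Assuming the common type nodes have been initialized, a
   target or front end might build a 4 x float vector type roughly like:

     tree v4sf_type = build_vector_type (float_type_node, 4);
     tree v4sf_ptr  = build_pointer_type (v4sf_type);

   Callers supply only the element type and the number of units; the mode
   is either given explicitly (build_vector_type_for_mode) or computed by
   layout_type.  */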
9488
9489 static tree
9490 make_or_reuse_type (unsigned size, int unsignedp)
9491 {
9492 if (size == INT_TYPE_SIZE)
9493 return unsignedp ? unsigned_type_node : integer_type_node;
9494 if (size == CHAR_TYPE_SIZE)
9495 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9496 if (size == SHORT_TYPE_SIZE)
9497 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9498 if (size == LONG_TYPE_SIZE)
9499 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9500 if (size == LONG_LONG_TYPE_SIZE)
9501 return (unsignedp ? long_long_unsigned_type_node
9502 : long_long_integer_type_node);
9503 if (size == 128 && int128_integer_type_node)
9504 return (unsignedp ? int128_unsigned_type_node
9505 : int128_integer_type_node);
9506
9507 if (unsignedp)
9508 return make_unsigned_type (size);
9509 else
9510 return make_signed_type (size);
9511 }
9512
9513 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9514
9515 static tree
9516 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9517 {
9518 if (satp)
9519 {
9520 if (size == SHORT_FRACT_TYPE_SIZE)
9521 return unsignedp ? sat_unsigned_short_fract_type_node
9522 : sat_short_fract_type_node;
9523 if (size == FRACT_TYPE_SIZE)
9524 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9525 if (size == LONG_FRACT_TYPE_SIZE)
9526 return unsignedp ? sat_unsigned_long_fract_type_node
9527 : sat_long_fract_type_node;
9528 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9529 return unsignedp ? sat_unsigned_long_long_fract_type_node
9530 : sat_long_long_fract_type_node;
9531 }
9532 else
9533 {
9534 if (size == SHORT_FRACT_TYPE_SIZE)
9535 return unsignedp ? unsigned_short_fract_type_node
9536 : short_fract_type_node;
9537 if (size == FRACT_TYPE_SIZE)
9538 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9539 if (size == LONG_FRACT_TYPE_SIZE)
9540 return unsignedp ? unsigned_long_fract_type_node
9541 : long_fract_type_node;
9542 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9543 return unsignedp ? unsigned_long_long_fract_type_node
9544 : long_long_fract_type_node;
9545 }
9546
9547 return make_fract_type (size, unsignedp, satp);
9548 }
9549
9550 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9551
9552 static tree
9553 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9554 {
9555 if (satp)
9556 {
9557 if (size == SHORT_ACCUM_TYPE_SIZE)
9558 return unsignedp ? sat_unsigned_short_accum_type_node
9559 : sat_short_accum_type_node;
9560 if (size == ACCUM_TYPE_SIZE)
9561 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9562 if (size == LONG_ACCUM_TYPE_SIZE)
9563 return unsignedp ? sat_unsigned_long_accum_type_node
9564 : sat_long_accum_type_node;
9565 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9566 return unsignedp ? sat_unsigned_long_long_accum_type_node
9567 : sat_long_long_accum_type_node;
9568 }
9569 else
9570 {
9571 if (size == SHORT_ACCUM_TYPE_SIZE)
9572 return unsignedp ? unsigned_short_accum_type_node
9573 : short_accum_type_node;
9574 if (size == ACCUM_TYPE_SIZE)
9575 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9576 if (size == LONG_ACCUM_TYPE_SIZE)
9577 return unsignedp ? unsigned_long_accum_type_node
9578 : long_accum_type_node;
9579 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9580 return unsignedp ? unsigned_long_long_accum_type_node
9581 : long_long_accum_type_node;
9582 }
9583
9584 return make_accum_type (size, unsignedp, satp);
9585 }
9586
9587
9588 /* Create an atomic variant node for TYPE. This routine is called
9589 during initialization of data types to create the 5 basic atomic
9590 types. The generic build_variant_type function requires these to
9591 already be set up in order to function properly, so cannot be
9592 called from there. If ALIGN is non-zero, then ensure alignment is
9593 overridden to this value. */
9594
9595 static tree
9596 build_atomic_base (tree type, unsigned int align)
9597 {
9598 tree t;
9599
9600   /* Make sure it's not already registered.  */
9601 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9602 return t;
9603
9604 t = build_variant_type_copy (type);
9605 set_type_quals (t, TYPE_QUAL_ATOMIC);
9606
9607 if (align)
9608 TYPE_ALIGN (t) = align;
9609
9610 return t;
9611 }
9612
9613 /* Create nodes for all integer types (and error_mark_node) using the sizes
9614 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9615 SHORT_DOUBLE specifies whether double should be of the same precision
9616 as float. */
9617
9618 void
9619 build_common_tree_nodes (bool signed_char, bool short_double)
9620 {
9621 error_mark_node = make_node (ERROR_MARK);
9622 TREE_TYPE (error_mark_node) = error_mark_node;
9623
9624 initialize_sizetypes ();
9625
9626 /* Define both `signed char' and `unsigned char'. */
9627 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9628 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9629 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9630 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9631
9632 /* Define `char', which is like either `signed char' or `unsigned char'
9633 but not the same as either. */
9634 char_type_node
9635 = (signed_char
9636 ? make_signed_type (CHAR_TYPE_SIZE)
9637 : make_unsigned_type (CHAR_TYPE_SIZE));
9638 TYPE_STRING_FLAG (char_type_node) = 1;
9639
9640 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9641 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9642 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9643 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9644 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9645 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9646 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9647 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9648 #if HOST_BITS_PER_WIDE_INT >= 64
9649   /* TODO: This isn't correct, but the logic currently depends on the
9650      host's wide integers rather than the target's.
9651      If a target does not support TImode but has a 128-bit integer
9652      scalar register, this target check will need to be adjusted.  */
9653 if (targetm.scalar_mode_supported_p (TImode))
9654 {
9655 int128_integer_type_node = make_signed_type (128);
9656 int128_unsigned_type_node = make_unsigned_type (128);
9657 }
9658 #endif
9659
9660 /* Define a boolean type. This type only represents boolean values but
9661 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9662 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9663 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9664 TYPE_PRECISION (boolean_type_node) = 1;
9665 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9666
9667 /* Define what type to use for size_t. */
9668 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9669 size_type_node = unsigned_type_node;
9670 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9671 size_type_node = long_unsigned_type_node;
9672 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9673 size_type_node = long_long_unsigned_type_node;
9674 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9675 size_type_node = short_unsigned_type_node;
9676 else
9677 gcc_unreachable ();
9678
9679 /* Fill in the rest of the sized types. Reuse existing type nodes
9680 when possible. */
9681 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9682 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9683 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9684 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9685 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9686
9687 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9688 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9689 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9690 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9691 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9692
9693   /* Don't call build_qualified_type for atomics.  That routine does
9694 special processing for atomics, and until they are initialized
9695 it's better not to make that call.
9696
9697 Check to see if there is a target override for atomic types. */
9698
9699 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9700 targetm.atomic_align_for_mode (QImode));
9701 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9702 targetm.atomic_align_for_mode (HImode));
9703 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9704 targetm.atomic_align_for_mode (SImode));
9705 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9706 targetm.atomic_align_for_mode (DImode));
9707 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9708 targetm.atomic_align_for_mode (TImode));
9709
9710 access_public_node = get_identifier ("public");
9711 access_protected_node = get_identifier ("protected");
9712 access_private_node = get_identifier ("private");
9713
9714   /* Define these next since types below may use them.  */
9715 integer_zero_node = build_int_cst (integer_type_node, 0);
9716 integer_one_node = build_int_cst (integer_type_node, 1);
9717 integer_three_node = build_int_cst (integer_type_node, 3);
9718 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9719
9720 size_zero_node = size_int (0);
9721 size_one_node = size_int (1);
9722 bitsize_zero_node = bitsize_int (0);
9723 bitsize_one_node = bitsize_int (1);
9724 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9725
9726 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9727 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9728
9729 void_type_node = make_node (VOID_TYPE);
9730 layout_type (void_type_node);
9731
9732 /* We are not going to have real types in C with less than byte alignment,
9733 so we might as well not have any types that claim to have it. */
9734 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9735 TYPE_USER_ALIGN (void_type_node) = 0;
9736
9737 void_node = make_node (VOID_CST);
9738 TREE_TYPE (void_node) = void_type_node;
9739
9740 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9741 layout_type (TREE_TYPE (null_pointer_node));
9742
9743 ptr_type_node = build_pointer_type (void_type_node);
9744 const_ptr_type_node
9745 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9746 fileptr_type_node = ptr_type_node;
9747
9748 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9749
9750 float_type_node = make_node (REAL_TYPE);
9751 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9752 layout_type (float_type_node);
9753
9754 double_type_node = make_node (REAL_TYPE);
9755 if (short_double)
9756 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9757 else
9758 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9759 layout_type (double_type_node);
9760
9761 long_double_type_node = make_node (REAL_TYPE);
9762 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9763 layout_type (long_double_type_node);
9764
9765 float_ptr_type_node = build_pointer_type (float_type_node);
9766 double_ptr_type_node = build_pointer_type (double_type_node);
9767 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9768 integer_ptr_type_node = build_pointer_type (integer_type_node);
9769
9770 /* Fixed size integer types. */
9771 uint16_type_node = build_nonstandard_integer_type (16, true);
9772 uint32_type_node = build_nonstandard_integer_type (32, true);
9773 uint64_type_node = build_nonstandard_integer_type (64, true);
9774
9775 /* Decimal float types. */
9776 dfloat32_type_node = make_node (REAL_TYPE);
9777 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9778 layout_type (dfloat32_type_node);
9779 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9780 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9781
9782 dfloat64_type_node = make_node (REAL_TYPE);
9783 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9784 layout_type (dfloat64_type_node);
9785 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9786 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9787
9788 dfloat128_type_node = make_node (REAL_TYPE);
9789 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9790 layout_type (dfloat128_type_node);
9791 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9792 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9793
9794 complex_integer_type_node = build_complex_type (integer_type_node);
9795 complex_float_type_node = build_complex_type (float_type_node);
9796 complex_double_type_node = build_complex_type (double_type_node);
9797 complex_long_double_type_node = build_complex_type (long_double_type_node);
9798
9799 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9800 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9801 sat_ ## KIND ## _type_node = \
9802 make_sat_signed_ ## KIND ## _type (SIZE); \
9803 sat_unsigned_ ## KIND ## _type_node = \
9804 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9805 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9806 unsigned_ ## KIND ## _type_node = \
9807 make_unsigned_ ## KIND ## _type (SIZE);
9808
9809 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9810 sat_ ## WIDTH ## KIND ## _type_node = \
9811 make_sat_signed_ ## KIND ## _type (SIZE); \
9812 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9813 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9814 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9815 unsigned_ ## WIDTH ## KIND ## _type_node = \
9816 make_unsigned_ ## KIND ## _type (SIZE);
9817
9818 /* Make fixed-point type nodes based on four different widths. */
9819 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9820 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9821 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9822 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9823 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9824
9825 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9826 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9827 NAME ## _type_node = \
9828 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9829 u ## NAME ## _type_node = \
9830 make_or_reuse_unsigned_ ## KIND ## _type \
9831 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9832 sat_ ## NAME ## _type_node = \
9833 make_or_reuse_sat_signed_ ## KIND ## _type \
9834 (GET_MODE_BITSIZE (MODE ## mode)); \
9835 sat_u ## NAME ## _type_node = \
9836 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9837 (GET_MODE_BITSIZE (U ## MODE ## mode));
9838
9839 /* Fixed-point type and mode nodes. */
9840 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9841 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9842 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9843 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9844 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9845 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9846 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9847 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9848 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9849 MAKE_FIXED_MODE_NODE (accum, da, DA)
9850 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9851
9852 {
9853 tree t = targetm.build_builtin_va_list ();
9854
9855 /* Many back-ends define record types without setting TYPE_NAME.
9856 If we copied the record type here, we'd keep the original
9857 record type without a name. This breaks name mangling. So,
9858 don't copy record types and let c_common_nodes_and_builtins()
9859 declare the type to be __builtin_va_list. */
9860 if (TREE_CODE (t) != RECORD_TYPE)
9861 t = build_variant_type_copy (t);
9862
9863 va_list_type_node = t;
9864 }
9865 }
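
/* A hypothetical invocation sketch (not from the GCC sources): a front end
   typically calls this once during initialization, before creating any of
   its own types, along the lines of

     build_common_tree_nodes (flag_signed_char, flag_short_double);
     ... register language-specific types and builtins ...
     build_common_builtin_nodes ();

   The exact arguments are the front end's choice; the flags shown here are
   only an example.  */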
9866
9867 /* Modify DECL for the given flags.
9868    The TM_PURE attribute is set only on types, so the function will modify
9869    DECL's type when ECF_TM_PURE is used.  */
9870
9871 void
9872 set_call_expr_flags (tree decl, int flags)
9873 {
9874 if (flags & ECF_NOTHROW)
9875 TREE_NOTHROW (decl) = 1;
9876 if (flags & ECF_CONST)
9877 TREE_READONLY (decl) = 1;
9878 if (flags & ECF_PURE)
9879 DECL_PURE_P (decl) = 1;
9880 if (flags & ECF_LOOPING_CONST_OR_PURE)
9881 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9882 if (flags & ECF_NOVOPS)
9883 DECL_IS_NOVOPS (decl) = 1;
9884 if (flags & ECF_NORETURN)
9885 TREE_THIS_VOLATILE (decl) = 1;
9886 if (flags & ECF_MALLOC)
9887 DECL_IS_MALLOC (decl) = 1;
9888 if (flags & ECF_RETURNS_TWICE)
9889 DECL_IS_RETURNS_TWICE (decl) = 1;
9890 if (flags & ECF_LEAF)
9891 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9892 NULL, DECL_ATTRIBUTES (decl));
9893 if ((flags & ECF_TM_PURE) && flag_tm)
9894 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9895 /* Looping const or pure is implied by noreturn.
9896 There is currently no way to declare looping const or looping pure alone. */
9897 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9898 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9899 }
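
/* An illustrative sketch (not from the GCC sources; DECL is hypothetical):
   the flags form a bitmask, so a caller that wants a declaration marked as
   never throwing and never returning might write

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_NORETURN);

   which, per the code above, sets TREE_NOTHROW and TREE_THIS_VOLATILE on
   DECL.  */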
9900
9901
9902 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9903
9904 static void
9905 local_define_builtin (const char *name, tree type, enum built_in_function code,
9906 const char *library_name, int ecf_flags)
9907 {
9908 tree decl;
9909
9910 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9911 library_name, NULL_TREE);
9912 set_call_expr_flags (decl, ecf_flags);
9913
9914 set_builtin_decl (code, decl, true);
9915 }
9916
9917 /* Call this function after instantiating all builtins that the language
9918 front end cares about. This will build the rest of the builtins that
9919 are relied upon by the tree optimizers and the middle-end. */
9920
9921 void
9922 build_common_builtin_nodes (void)
9923 {
9924 tree tmp, ftype;
9925 int ecf_flags;
9926
9927 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9928 {
9929 ftype = build_function_type (void_type_node, void_list_node);
9930 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9931 "__builtin_unreachable",
9932                             ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9933                             | ECF_CONST);
9934 }
9935
9936 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9937 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9938 {
9939 ftype = build_function_type_list (ptr_type_node,
9940 ptr_type_node, const_ptr_type_node,
9941 size_type_node, NULL_TREE);
9942
9943 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9944 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9945 "memcpy", ECF_NOTHROW | ECF_LEAF);
9946 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9947 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9948 "memmove", ECF_NOTHROW | ECF_LEAF);
9949 }
9950
9951 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9952 {
9953 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9954 const_ptr_type_node, size_type_node,
9955 NULL_TREE);
9956 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9957 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9958 }
9959
9960 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9961 {
9962 ftype = build_function_type_list (ptr_type_node,
9963 ptr_type_node, integer_type_node,
9964 size_type_node, NULL_TREE);
9965 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9966 "memset", ECF_NOTHROW | ECF_LEAF);
9967 }
9968
9969 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9970 {
9971 ftype = build_function_type_list (ptr_type_node,
9972 size_type_node, NULL_TREE);
9973 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9974 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9975 }
9976
9977 ftype = build_function_type_list (ptr_type_node, size_type_node,
9978 size_type_node, NULL_TREE);
9979 local_define_builtin ("__builtin_alloca_with_align", ftype,
9980 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9981 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9982
9983 /* If we're checking the stack, `alloca' can throw. */
9984 if (flag_stack_check)
9985 {
9986 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9987 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9988 }
9989
9990 ftype = build_function_type_list (void_type_node,
9991 ptr_type_node, ptr_type_node,
9992 ptr_type_node, NULL_TREE);
9993 local_define_builtin ("__builtin_init_trampoline", ftype,
9994 BUILT_IN_INIT_TRAMPOLINE,
9995 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9996 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9997 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9998 "__builtin_init_heap_trampoline",
9999 ECF_NOTHROW | ECF_LEAF);
10000
10001 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10002 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10003 BUILT_IN_ADJUST_TRAMPOLINE,
10004 "__builtin_adjust_trampoline",
10005 ECF_CONST | ECF_NOTHROW);
10006
10007 ftype = build_function_type_list (void_type_node,
10008 ptr_type_node, ptr_type_node, NULL_TREE);
10009 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10010 BUILT_IN_NONLOCAL_GOTO,
10011 "__builtin_nonlocal_goto",
10012 ECF_NORETURN | ECF_NOTHROW);
10013
10014 ftype = build_function_type_list (void_type_node,
10015 ptr_type_node, ptr_type_node, NULL_TREE);
10016 local_define_builtin ("__builtin_setjmp_setup", ftype,
10017 BUILT_IN_SETJMP_SETUP,
10018 "__builtin_setjmp_setup", ECF_NOTHROW);
10019
10020 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10021 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10022 BUILT_IN_SETJMP_RECEIVER,
10023 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10024
10025 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10026 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10027 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10028
10029 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10030 local_define_builtin ("__builtin_stack_restore", ftype,
10031 BUILT_IN_STACK_RESTORE,
10032 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10033
10034 /* If there's a possibility that we might use the ARM EABI, build the
10035 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10036 if (targetm.arm_eabi_unwinder)
10037 {
10038 ftype = build_function_type_list (void_type_node, NULL_TREE);
10039 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10040 BUILT_IN_CXA_END_CLEANUP,
10041 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10042 }
10043
10044 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10045 local_define_builtin ("__builtin_unwind_resume", ftype,
10046 BUILT_IN_UNWIND_RESUME,
10047 ((targetm_common.except_unwind_info (&global_options)
10048 == UI_SJLJ)
10049 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10050 ECF_NORETURN);
10051
10052 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10053 {
10054 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10055 NULL_TREE);
10056 local_define_builtin ("__builtin_return_address", ftype,
10057 BUILT_IN_RETURN_ADDRESS,
10058 "__builtin_return_address",
10059 ECF_NOTHROW);
10060 }
10061
10062 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10063 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10064 {
10065 ftype = build_function_type_list (void_type_node, ptr_type_node,
10066 ptr_type_node, NULL_TREE);
10067 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10068 local_define_builtin ("__cyg_profile_func_enter", ftype,
10069 BUILT_IN_PROFILE_FUNC_ENTER,
10070 "__cyg_profile_func_enter", 0);
10071 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10072 local_define_builtin ("__cyg_profile_func_exit", ftype,
10073 BUILT_IN_PROFILE_FUNC_EXIT,
10074 "__cyg_profile_func_exit", 0);
10075 }
10076
10077 /* The exception object and filter values from the runtime. The argument
10078 must be zero before exception lowering, i.e. from the front end. After
10079 exception lowering, it will be the region number for the exception
10080 landing pad. These functions are PURE instead of CONST to prevent
10081 them from being hoisted past the exception edge that will initialize
10082 its value in the landing pad. */
10083 ftype = build_function_type_list (ptr_type_node,
10084 integer_type_node, NULL_TREE);
10085 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10086   /* Only use TM_PURE if we have TM language support.  */
10087 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10088 ecf_flags |= ECF_TM_PURE;
10089 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10090 "__builtin_eh_pointer", ecf_flags);
10091
10092 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10093 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10094 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10095 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10096
10097 ftype = build_function_type_list (void_type_node,
10098 integer_type_node, integer_type_node,
10099 NULL_TREE);
10100 local_define_builtin ("__builtin_eh_copy_values", ftype,
10101 BUILT_IN_EH_COPY_VALUES,
10102 "__builtin_eh_copy_values", ECF_NOTHROW);
10103
10104 /* Complex multiplication and division. These are handled as builtins
10105 rather than optabs because emit_library_call_value doesn't support
10106 complex. Further, we can do slightly better with folding these
10107      beasties if the real and imaginary parts of the arguments are separate.  */
10108 {
10109 int mode;
10110
10111 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10112 {
10113 char mode_name_buf[4], *q;
10114 const char *p;
10115 enum built_in_function mcode, dcode;
10116 tree type, inner_type;
10117 const char *prefix = "__";
10118
10119 if (targetm.libfunc_gnu_prefix)
10120 prefix = "__gnu_";
10121
10122 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10123 if (type == NULL)
10124 continue;
10125 inner_type = TREE_TYPE (type);
10126
10127 ftype = build_function_type_list (type, inner_type, inner_type,
10128 inner_type, inner_type, NULL_TREE);
10129
10130 mcode = ((enum built_in_function)
10131 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10132 dcode = ((enum built_in_function)
10133 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10134
10135 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10136 *q = TOLOWER (*p);
10137 *q = '\0';
10138
10139 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10140 NULL);
10141 local_define_builtin (built_in_names[mcode], ftype, mcode,
10142 built_in_names[mcode],
10143 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10144
10145 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10146 NULL);
10147 local_define_builtin (built_in_names[dcode], ftype, dcode,
10148 built_in_names[dcode],
10149 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10150 }
10151 }
10152 }
10153
10154 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10155 better way.
10156
10157 If we requested a pointer to a vector, build up the pointers that
10158 we stripped off while looking for the inner type. Similarly for
10159 return values from functions.
10160
10161 The argument TYPE is the top of the chain, and BOTTOM is the
10162 new type which we will point to. */
10163
10164 tree
10165 reconstruct_complex_type (tree type, tree bottom)
10166 {
10167 tree inner, outer;
10168
10169 if (TREE_CODE (type) == POINTER_TYPE)
10170 {
10171 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10172 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10173 TYPE_REF_CAN_ALIAS_ALL (type));
10174 }
10175 else if (TREE_CODE (type) == REFERENCE_TYPE)
10176 {
10177 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10178 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10179 TYPE_REF_CAN_ALIAS_ALL (type));
10180 }
10181 else if (TREE_CODE (type) == ARRAY_TYPE)
10182 {
10183 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10184 outer = build_array_type (inner, TYPE_DOMAIN (type));
10185 }
10186 else if (TREE_CODE (type) == FUNCTION_TYPE)
10187 {
10188 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10189 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10190 }
10191 else if (TREE_CODE (type) == METHOD_TYPE)
10192 {
10193 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10194       /* The build_method_type_directly() routine prepends 'this' to the argument list,
10195 so we must compensate by getting rid of it. */
10196 outer
10197 = build_method_type_directly
10198 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10199 inner,
10200 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10201 }
10202 else if (TREE_CODE (type) == OFFSET_TYPE)
10203 {
10204 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10205 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10206 }
10207 else
10208 return bottom;
10209
10210 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10211 TYPE_QUALS (type));
10212 }
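
/* An illustrative sketch (not from the GCC sources; values hypothetical):
   given a pointer type such as "float *" and a new bottom type such as a
   V4SF vector, the stripped-off pointer is rebuilt around the new bottom:

     tree v4sf  = build_vector_type (float_type_node, 4);
     tree f_ptr = build_pointer_type (float_type_node);
     tree v_ptr = reconstruct_complex_type (f_ptr, v4sf);

   v_ptr is then a pointer to the vector type, with the qualifiers and
   attributes of the original outer type preserved by
   build_type_attribute_qual_variant.  */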
10213
10214 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10215 the inner type. */
10216 tree
10217 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10218 {
10219 int nunits;
10220
10221 switch (GET_MODE_CLASS (mode))
10222 {
10223 case MODE_VECTOR_INT:
10224 case MODE_VECTOR_FLOAT:
10225 case MODE_VECTOR_FRACT:
10226 case MODE_VECTOR_UFRACT:
10227 case MODE_VECTOR_ACCUM:
10228 case MODE_VECTOR_UACCUM:
10229 nunits = GET_MODE_NUNITS (mode);
10230 break;
10231
10232 case MODE_INT:
10233 /* Check that there are no leftover bits. */
10234 gcc_assert (GET_MODE_BITSIZE (mode)
10235 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10236
10237 nunits = GET_MODE_BITSIZE (mode)
10238 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10239 break;
10240
10241 default:
10242 gcc_unreachable ();
10243 }
10244
10245 return make_vector_type (innertype, nunits, mode);
10246 }
10247
10248 /* Similarly, but takes the inner type and number of units, which must be
10249 a power of two. */
10250
10251 tree
10252 build_vector_type (tree innertype, int nunits)
10253 {
10254 return make_vector_type (innertype, nunits, VOIDmode);
10255 }
10256
10257 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10258
10259 tree
10260 build_opaque_vector_type (tree innertype, int nunits)
10261 {
10262 tree t = make_vector_type (innertype, nunits, VOIDmode);
10263 tree cand;
10264 /* We always build the non-opaque variant before the opaque one,
10265 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10266 cand = TYPE_NEXT_VARIANT (t);
10267 if (cand
10268 && TYPE_VECTOR_OPAQUE (cand)
10269 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10270 return cand;
10271   /* Otherwise build a variant type and make sure to queue it after
10272 the non-opaque type. */
10273 cand = build_distinct_type_copy (t);
10274 TYPE_VECTOR_OPAQUE (cand) = true;
10275 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10276 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10277 TYPE_NEXT_VARIANT (t) = cand;
10278 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10279 return cand;
10280 }
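
/* An illustrative sketch (not from the GCC sources; assumes the common type
   nodes are initialized): the opaque variant is queued right after the
   ordinary vector type, so repeated requests reuse it:

     tree v4si   = build_vector_type (intSI_type_node, 4);
     tree v4si_o = build_opaque_vector_type (intSI_type_node, 4);

   A second call to build_opaque_vector_type with the same arguments should
   return v4si_o again, and TYPE_MAIN_VARIANT (v4si_o) is the main variant
   shared with v4si.  */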
10281
10282
10283 /* Given an initializer INIT, return TRUE if INIT is zero or some
10284 aggregate of zeros. Otherwise return FALSE. */
10285 bool
10286 initializer_zerop (const_tree init)
10287 {
10288 tree elt;
10289
10290 STRIP_NOPS (init);
10291
10292 switch (TREE_CODE (init))
10293 {
10294 case INTEGER_CST:
10295 return integer_zerop (init);
10296
10297 case REAL_CST:
10298 /* ??? Note that this is not correct for C4X float formats. There,
10299 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10300 negative exponent. */
10301 return real_zerop (init)
10302 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10303
10304 case FIXED_CST:
10305 return fixed_zerop (init);
10306
10307 case COMPLEX_CST:
10308 return integer_zerop (init)
10309 || (real_zerop (init)
10310 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10311 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10312
10313 case VECTOR_CST:
10314 {
10315 unsigned i;
10316 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10317 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10318 return false;
10319 return true;
10320 }
10321
10322 case CONSTRUCTOR:
10323 {
10324 unsigned HOST_WIDE_INT idx;
10325
10326 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10327 if (!initializer_zerop (elt))
10328 return false;
10329 return true;
10330 }
10331
10332 case STRING_CST:
10333 {
10334 int i;
10335
10336 /* We need to loop through all elements to handle cases like
10337 "\0" and "\0foobar". */
10338 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10339 if (TREE_STRING_POINTER (init)[i] != '\0')
10340 return false;
10341
10342 return true;
10343 }
10344
10345 default:
10346 return false;
10347 }
10348 }
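
/* Illustrative behaviour (not from the GCC sources; values hypothetical):

     initializer_zerop (build_int_cst (integer_type_node, 0));    returns true
     initializer_zerop (build_int_cst (integer_type_node, 7));    returns false
     initializer_zerop (build_string (4, "\0\0\0"));              returns true
     initializer_zerop (build_string (4, "\0abc"));               returns false

   Real constants count as zero only when they are not minus zero, and
   CONSTRUCTORs and VECTOR_CSTs are checked element by element.  */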
10349
10350 /* Check whether the vector VEC consists of all equal elements and that
10351    the number of elements corresponds to the type of VEC.
10352    Return the first element of the vector, or NULL_TREE if the vector
10353    is not uniform.  */
10354 tree
10355 uniform_vector_p (const_tree vec)
10356 {
10357 tree first, t;
10358 unsigned i;
10359
10360 if (vec == NULL_TREE)
10361 return NULL_TREE;
10362
10363 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10364
10365 if (TREE_CODE (vec) == VECTOR_CST)
10366 {
10367 first = VECTOR_CST_ELT (vec, 0);
10368 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10369 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10370 return NULL_TREE;
10371
10372 return first;
10373 }
10374
10375 else if (TREE_CODE (vec) == CONSTRUCTOR)
10376 {
10377 first = error_mark_node;
10378
10379 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10380 {
10381 if (i == 0)
10382 {
10383 first = t;
10384 continue;
10385 }
10386 if (!operand_equal_p (first, t, 0))
10387 return NULL_TREE;
10388 }
10389 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10390 return NULL_TREE;
10391
10392 return first;
10393 }
10394
10395 return NULL_TREE;
10396 }
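
/* An illustrative sketch (not from the GCC sources; values hypothetical):
   for a VECTOR_CST such as { 3, 3, 3, 3 } the shared element is returned;
   for { 3, 3, 3, 4 } the result is NULL_TREE.  For example:

     tree elt = build_int_cst (intSI_type_node, 3);
     tree vec = build_vector_from_val (build_vector_type (intSI_type_node, 4),
                                       elt);
     gcc_assert (uniform_vector_p (vec) != NULL_TREE);

   A CONSTRUCTOR is accepted as well, but only when it supplies exactly
   TYPE_VECTOR_SUBPARTS elements, all equal.  */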
10397
10398 /* Build an empty statement at location LOC. */
10399
10400 tree
10401 build_empty_stmt (location_t loc)
10402 {
10403 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10404 SET_EXPR_LOCATION (t, loc);
10405 return t;
10406 }
10407
10408
10409 /* Build an OpenMP clause with code CODE. LOC is the location of the
10410 clause. */
10411
10412 tree
10413 build_omp_clause (location_t loc, enum omp_clause_code code)
10414 {
10415 tree t;
10416 int size, length;
10417
10418 length = omp_clause_num_ops[code];
10419 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10420
10421 record_node_allocation_statistics (OMP_CLAUSE, size);
10422
10423 t = (tree) ggc_internal_alloc (size);
10424 memset (t, 0, size);
10425 TREE_SET_CODE (t, OMP_CLAUSE);
10426 OMP_CLAUSE_SET_CODE (t, code);
10427 OMP_CLAUSE_LOCATION (t) = loc;
10428
10429 return t;
10430 }
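
/* An illustrative sketch (not from the GCC sources; LOC, N and
   EXISTING_CLAUSES are hypothetical): a clause is allocated with room for
   its fixed number of operands and then filled in by the caller, e.g. for
   "num_threads (n)":

     tree c = build_omp_clause (loc, OMP_CLAUSE_NUM_THREADS);
     OMP_CLAUSE_NUM_THREADS_EXPR (c) = n;
     OMP_CLAUSE_CHAIN (c) = existing_clauses;
*/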
10431
10432 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10433 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10434 Except for the CODE and operand count field, other storage for the
10435 object is initialized to zeros. */
10436
10437 tree
10438 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10439 {
10440 tree t;
10441 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10442
10443 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10444 gcc_assert (len >= 1);
10445
10446 record_node_allocation_statistics (code, length);
10447
10448 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10449
10450 TREE_SET_CODE (t, code);
10451
10452 /* Can't use TREE_OPERAND to store the length because if checking is
10453 enabled, it will try to check the length before we store it. :-P */
10454 t->exp.operands[0] = build_int_cst (sizetype, len);
10455
10456 return t;
10457 }
10458
10459 /* Helper function for build_call_* functions; build a CALL_EXPR with
10460 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10461 the argument slots. */
10462
10463 static tree
10464 build_call_1 (tree return_type, tree fn, int nargs)
10465 {
10466 tree t;
10467
10468 t = build_vl_exp (CALL_EXPR, nargs + 3);
10469 TREE_TYPE (t) = return_type;
10470 CALL_EXPR_FN (t) = fn;
10471 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10472
10473 return t;
10474 }
10475
10476 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10477 FN and a null static chain slot. NARGS is the number of call arguments
10478 which are specified as "..." arguments. */
10479
10480 tree
10481 build_call_nary (tree return_type, tree fn, int nargs, ...)
10482 {
10483 tree ret;
10484 va_list args;
10485 va_start (args, nargs);
10486 ret = build_call_valist (return_type, fn, nargs, args);
10487 va_end (args);
10488 return ret;
10489 }
10490
10491 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10492 FN and a null static chain slot. NARGS is the number of call arguments
10493 which are specified as a va_list ARGS. */
10494
10495 tree
10496 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10497 {
10498 tree t;
10499 int i;
10500
10501 t = build_call_1 (return_type, fn, nargs);
10502 for (i = 0; i < nargs; i++)
10503 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10504 process_call_operands (t);
10505 return t;
10506 }
10507
10508 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10509 FN and a null static chain slot. NARGS is the number of call arguments
10510 which are specified as a tree array ARGS. */
10511
10512 tree
10513 build_call_array_loc (location_t loc, tree return_type, tree fn,
10514 int nargs, const tree *args)
10515 {
10516 tree t;
10517 int i;
10518
10519 t = build_call_1 (return_type, fn, nargs);
10520 for (i = 0; i < nargs; i++)
10521 CALL_EXPR_ARG (t, i) = args[i];
10522 process_call_operands (t);
10523 SET_EXPR_LOCATION (t, loc);
10524 return t;
10525 }
10526
10527 /* Like build_call_array, but takes a vec. */
10528
10529 tree
10530 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10531 {
10532 tree ret, t;
10533 unsigned int ix;
10534
10535 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10536 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10537 CALL_EXPR_ARG (ret, ix) = t;
10538 process_call_operands (ret);
10539 return ret;
10540 }
10541
10542 /* Conveniently construct a function call expression. FNDECL names the
10543 function to be called and N arguments are passed in the array
10544 ARGARRAY. */
10545
10546 tree
10547 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10548 {
10549 tree fntype = TREE_TYPE (fndecl);
10550 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10551
10552 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10553 }
10554
10555 /* Conveniently construct a function call expression. FNDECL names the
10556 function to be called and the arguments are passed in the vector
10557 VEC. */
10558
10559 tree
10560 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10561 {
10562 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10563 vec_safe_address (vec));
10564 }
10565
10566
10567 /* Conveniently construct a function call expression. FNDECL names the
10568 function to be called, N is the number of arguments, and the "..."
10569 parameters are the argument expressions. */
10570
10571 tree
10572 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10573 {
10574 va_list ap;
10575 tree *argarray = XALLOCAVEC (tree, n);
10576 int i;
10577
10578 va_start (ap, n);
10579 for (i = 0; i < n; i++)
10580 argarray[i] = va_arg (ap, tree);
10581 va_end (ap);
10582 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10583 }
10584
10585 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10586 varargs macros aren't supported by all bootstrap compilers. */
10587
10588 tree
10589 build_call_expr (tree fndecl, int n, ...)
10590 {
10591 va_list ap;
10592 tree *argarray = XALLOCAVEC (tree, n);
10593 int i;
10594
10595 va_start (ap, n);
10596 for (i = 0; i < n; i++)
10597 argarray[i] = va_arg (ap, tree);
10598 va_end (ap);
10599 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10600 }
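
/* An illustrative sketch (not from the GCC sources; DEST, SRC and SIZE are
   hypothetical trees): building a call to memcpy via its builtin
   declaration:

     tree fn   = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dest, src, size);

   Because build_call_expr_loc_array goes through fold_builtin_call_array,
   the result may already be folded and need not be a CALL_EXPR.  */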
10601
10602 /* Build an internal call expression.  This is just like CALL_EXPR, except
10603    its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
10604    internal function call.  */
10605
10606 tree
10607 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10608 tree type, int n, ...)
10609 {
10610 va_list ap;
10611 int i;
10612
10613 tree fn = build_call_1 (type, NULL_TREE, n);
10614 va_start (ap, n);
10615 for (i = 0; i < n; i++)
10616 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10617 va_end (ap);
10618 SET_EXPR_LOCATION (fn, loc);
10619 CALL_EXPR_IFN (fn) = ifn;
10620 return fn;
10621 }
10622
10623 /* Create a new constant string literal and return a char* pointer to it.
10624 The STRING_CST value is the LEN characters at STR. */
10625 tree
10626 build_string_literal (int len, const char *str)
10627 {
10628 tree t, elem, index, type;
10629
10630 t = build_string (len, str);
10631 elem = build_type_variant (char_type_node, 1, 0);
10632 index = build_index_type (size_int (len - 1));
10633 type = build_array_type (elem, index);
10634 TREE_TYPE (t) = type;
10635 TREE_CONSTANT (t) = 1;
10636 TREE_READONLY (t) = 1;
10637 TREE_STATIC (t) = 1;
10638
10639 type = build_pointer_type (elem);
10640 t = build1 (ADDR_EXPR, type,
10641 build4 (ARRAY_REF, elem,
10642 t, integer_zero_node, NULL_TREE, NULL_TREE));
10643 return t;
10644 }
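
/* An illustrative sketch (not from the GCC sources): callers typically pass
   strlen (STR) + 1 so the terminating NUL is part of the literal, e.g.

     tree str = build_string_literal (3, "hi");

   The result is an ADDR_EXPR of an ARRAY_REF into the STRING_CST, i.e. a
   pointer to the first character, suitable as a call argument.  */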
10645
10646
10647
10648 /* Return true if T (assumed to be a DECL) must be assigned a memory
10649 location. */
10650
10651 bool
10652 needs_to_live_in_memory (const_tree t)
10653 {
10654 return (TREE_ADDRESSABLE (t)
10655 || is_global_var (t)
10656 || (TREE_CODE (t) == RESULT_DECL
10657 && !DECL_BY_REFERENCE (t)
10658 && aggregate_value_p (t, current_function_decl)));
10659 }
10660
10661 /* Return the value of the constant X, sign-extended.  */
10662
10663 HOST_WIDE_INT
10664 int_cst_value (const_tree x)
10665 {
10666 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10667 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10668
10669 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10670 gcc_assert (cst_and_fits_in_hwi (x));
10671
10672 if (bits < HOST_BITS_PER_WIDE_INT)
10673 {
10674 bool negative = ((val >> (bits - 1)) & 1) != 0;
10675 if (negative)
10676 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10677 else
10678 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10679 }
10680
10681 return val;
10682 }
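
/* An illustrative sketch (not from the GCC sources; values hypothetical):

     tree c = build_int_cst (signed_char_type_node, -1);
     HOST_WIDE_INT v = int_cst_value (c);

   v is -1: the low word of the constant is truncated to the 8-bit precision
   of the type and then sign-extended to HOST_WIDE_INT.  */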
10683
10684 /* If TYPE is an integral or pointer type, return an integer type with
10685 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10686 if TYPE is already an integer type of signedness UNSIGNEDP. */
10687
10688 tree
10689 signed_or_unsigned_type_for (int unsignedp, tree type)
10690 {
10691 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10692 return type;
10693
10694 if (TREE_CODE (type) == VECTOR_TYPE)
10695 {
10696 tree inner = TREE_TYPE (type);
10697 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10698 if (!inner2)
10699 return NULL_TREE;
10700 if (inner == inner2)
10701 return type;
10702 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10703 }
10704
10705 if (!INTEGRAL_TYPE_P (type)
10706 && !POINTER_TYPE_P (type)
10707 && TREE_CODE (type) != OFFSET_TYPE)
10708 return NULL_TREE;
10709
10710 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10711 }
10712
10713 /* If TYPE is an integral or pointer type, return an integer type with
10714 the same precision which is unsigned, or itself if TYPE is already an
10715 unsigned integer type. */
10716
10717 tree
10718 unsigned_type_for (tree type)
10719 {
10720 return signed_or_unsigned_type_for (1, type);
10721 }
10722
10723 /* If TYPE is an integral or pointer type, return an integer type with
10724 the same precision which is signed, or itself if TYPE is already a
10725 signed integer type. */
10726
10727 tree
10728 signed_type_for (tree type)
10729 {
10730 return signed_or_unsigned_type_for (0, type);
10731 }
10732
10733 /* If TYPE is a vector type, return a signed integer vector type with the
10734 same width and number of subparts. Otherwise return boolean_type_node. */
10735
10736 tree
10737 truth_type_for (tree type)
10738 {
10739 if (TREE_CODE (type) == VECTOR_TYPE)
10740 {
10741 tree elem = lang_hooks.types.type_for_size
10742 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10743 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10744 }
10745 else
10746 return boolean_type_node;
10747 }
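
/* Illustrative behaviour of the helpers above (not from the GCC sources):

     unsigned_type_for (long_integer_type_node)
       returns an unsigned integer type with the precision of long;
     signed_type_for (size_type_node)
       returns a signed integer type with the precision of size_t;
     truth_type_for (a 4 x float vector type)
       returns an opaque vector of four 32-bit signed integers;
     truth_type_for (integer_type_node)
       returns boolean_type_node.  */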
10748
10749 /* Returns the largest value obtainable by casting something in INNER type to
10750 OUTER type. */
10751
10752 tree
10753 upper_bound_in_type (tree outer, tree inner)
10754 {
10755 unsigned int det = 0;
10756 unsigned oprec = TYPE_PRECISION (outer);
10757 unsigned iprec = TYPE_PRECISION (inner);
10758 unsigned prec;
10759
10760 /* Compute a unique number for every combination. */
10761 det |= (oprec > iprec) ? 4 : 0;
10762 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10763 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10764
10765 /* Determine the exponent to use. */
10766 switch (det)
10767 {
10768 case 0:
10769 case 1:
10770 /* oprec <= iprec, outer: signed, inner: don't care. */
10771 prec = oprec - 1;
10772 break;
10773 case 2:
10774 case 3:
10775 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10776 prec = oprec;
10777 break;
10778 case 4:
10779 /* oprec > iprec, outer: signed, inner: signed. */
10780 prec = iprec - 1;
10781 break;
10782 case 5:
10783 /* oprec > iprec, outer: signed, inner: unsigned. */
10784 prec = iprec;
10785 break;
10786 case 6:
10787 /* oprec > iprec, outer: unsigned, inner: signed. */
10788 prec = oprec;
10789 break;
10790 case 7:
10791 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10792 prec = iprec;
10793 break;
10794 default:
10795 gcc_unreachable ();
10796 }
10797
10798 return wide_int_to_tree (outer,
10799 wi::mask (prec, false, TYPE_PRECISION (outer)));
10800 }
10801
10802 /* Returns the smallest value obtainable by casting something in INNER type to
10803 OUTER type. */
10804
10805 tree
10806 lower_bound_in_type (tree outer, tree inner)
10807 {
10808 unsigned oprec = TYPE_PRECISION (outer);
10809 unsigned iprec = TYPE_PRECISION (inner);
10810
10811 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10812 and obtain 0. */
10813 if (TYPE_UNSIGNED (outer)
10814 /* If we are widening something of an unsigned type, OUTER type
10815 contains all values of INNER type. In particular, both INNER
10816 and OUTER types have zero in common. */
10817 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10818 return build_int_cst (outer, 0);
10819 else
10820 {
10821 /* If we are widening a signed type to another signed type, we
10822          want to obtain -2^(iprec-1).  If we are keeping the
10823 precision or narrowing to a signed type, we want to obtain
10824 -2^(oprec-1). */
10825 unsigned prec = oprec > iprec ? iprec : oprec;
10826 return wide_int_to_tree (outer,
10827 wi::mask (prec - 1, true,
10828 TYPE_PRECISION (outer)));
10829 }
10830 }
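
/* Illustrative values (not from the GCC sources; the C type names stand in
   for the corresponding tree type nodes):

     upper_bound_in_type (unsigned char, signed char)   is  255
     upper_bound_in_type (signed char, unsigned char)   is  127
     lower_bound_in_type (signed short, signed char)    is -128
     lower_bound_in_type (unsigned int, signed char)    is    0
*/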
10831
10832 /* Return nonzero if two operands that are suitable for PHI nodes are
10833 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10834 SSA_NAME or invariant. Note that this is strictly an optimization.
10835 That is, callers of this function can directly call operand_equal_p
10836 and get the same result, only slower. */
10837
10838 int
10839 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10840 {
10841 if (arg0 == arg1)
10842 return 1;
10843 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10844 return 0;
10845 return operand_equal_p (arg0, arg1, 0);
10846 }
10847
10848 /* Returns the number of trailing zeros in the binary representation of X.  */
10849
10850 tree
10851 num_ending_zeros (const_tree x)
10852 {
10853 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10854 }
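
/* An illustrative sketch (not from the GCC sources; values hypothetical):
   for the constant 40 (binary 101000) the result is 3, returned as an
   INTEGER_CST of the same type as X:

     tree z = num_ending_zeros (build_int_cst (integer_type_node, 40));
     gcc_assert (tree_to_shwi (z) == 3);
*/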
10855
10856
10857 #define WALK_SUBTREE(NODE) \
10858 do \
10859 { \
10860 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10861 if (result) \
10862 return result; \
10863 } \
10864 while (0)
10865
10866 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10867    to be walked whenever a type is seen in the tree.  The rest of the operands
10868    and the return value are as for walk_tree.  */
10869
10870 static tree
10871 walk_type_fields (tree type, walk_tree_fn func, void *data,
10872 struct pointer_set_t *pset, walk_tree_lh lh)
10873 {
10874 tree result = NULL_TREE;
10875
10876 switch (TREE_CODE (type))
10877 {
10878 case POINTER_TYPE:
10879 case REFERENCE_TYPE:
10880 case VECTOR_TYPE:
10881 /* We have to worry about mutually recursive pointers. These can't
10882 be written in C. They can in Ada. It's pathological, but
10883 there's an ACATS test (c38102a) that checks it. Deal with this
10884 by checking if we're pointing to another pointer, that one
10885 points to another pointer, that one does too, and we have no htab.
10886 If so, get a hash table. We check three levels deep to avoid
10887 the cost of the hash table if we don't need one. */
10888 if (POINTER_TYPE_P (TREE_TYPE (type))
10889 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10890 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10891 && !pset)
10892 {
10893 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10894 func, data);
10895 if (result)
10896 return result;
10897
10898 break;
10899 }
10900
10901 /* ... fall through ... */
10902
10903 case COMPLEX_TYPE:
10904 WALK_SUBTREE (TREE_TYPE (type));
10905 break;
10906
10907 case METHOD_TYPE:
10908 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10909
10910 /* Fall through. */
10911
10912 case FUNCTION_TYPE:
10913 WALK_SUBTREE (TREE_TYPE (type));
10914 {
10915 tree arg;
10916
10917 /* We never want to walk into default arguments. */
10918 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10919 WALK_SUBTREE (TREE_VALUE (arg));
10920 }
10921 break;
10922
10923 case ARRAY_TYPE:
10924       /* Don't follow this node's type if it is a pointer, for fear that
10925 we'll have infinite recursion. If we have a PSET, then we
10926 need not fear. */
10927 if (pset
10928 || (!POINTER_TYPE_P (TREE_TYPE (type))
10929 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10930 WALK_SUBTREE (TREE_TYPE (type));
10931 WALK_SUBTREE (TYPE_DOMAIN (type));
10932 break;
10933
10934 case OFFSET_TYPE:
10935 WALK_SUBTREE (TREE_TYPE (type));
10936 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10937 break;
10938
10939 default:
10940 break;
10941 }
10942
10943 return NULL_TREE;
10944 }
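
/* An illustrative sketch (not from the GCC sources; the callback name is
   hypothetical): a typical walk_tree callback inspects each node, may prune
   the walk by clearing *WALK_SUBTREES, and stops the traversal by returning
   non-NULL:

     static tree
     find_ssa_name_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         return *tp;
       if (IS_TYPE_OR_DECL_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

     tree found = walk_tree_without_duplicates (&expr, find_ssa_name_r, NULL);

   walk_tree_without_duplicates allocates the pointer set itself; the plain
   walk_tree / walk_tree_1 entry points let the caller supply one.  */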
10945
10946 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10947 called with the DATA and the address of each sub-tree. If FUNC returns a
10948 non-NULL value, the traversal is stopped, and the value returned by FUNC
10949 is returned. If PSET is non-NULL it is used to record the nodes visited,
10950 and to avoid visiting a node more than once. */
10951
10952 tree
10953 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10954 struct pointer_set_t *pset, walk_tree_lh lh)
10955 {
10956 enum tree_code code;
10957 int walk_subtrees;
10958 tree result;
10959
10960 #define WALK_SUBTREE_TAIL(NODE) \
10961 do \
10962 { \
10963 tp = & (NODE); \
10964 goto tail_recurse; \
10965 } \
10966 while (0)
10967
10968 tail_recurse:
10969 /* Skip empty subtrees. */
10970 if (!*tp)
10971 return NULL_TREE;
10972
10973 /* Don't walk the same tree twice, if the user has requested
10974 that we avoid doing so. */
10975 if (pset && pointer_set_insert (pset, *tp))
10976 return NULL_TREE;
10977
10978 /* Call the function. */
10979 walk_subtrees = 1;
10980 result = (*func) (tp, &walk_subtrees, data);
10981
10982 /* If we found something, return it. */
10983 if (result)
10984 return result;
10985
10986 code = TREE_CODE (*tp);
10987
10988 /* Even if we didn't, FUNC may have decided that there was nothing
10989 interesting below this point in the tree. */
10990 if (!walk_subtrees)
10991 {
10992 /* But we still need to check our siblings. */
10993 if (code == TREE_LIST)
10994 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10995 else if (code == OMP_CLAUSE)
10996 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10997 else
10998 return NULL_TREE;
10999 }
11000
11001 if (lh)
11002 {
11003 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11004 if (result || !walk_subtrees)
11005 return result;
11006 }
11007
11008 switch (code)
11009 {
11010 case ERROR_MARK:
11011 case IDENTIFIER_NODE:
11012 case INTEGER_CST:
11013 case REAL_CST:
11014 case FIXED_CST:
11015 case VECTOR_CST:
11016 case STRING_CST:
11017 case BLOCK:
11018 case PLACEHOLDER_EXPR:
11019 case SSA_NAME:
11020 case FIELD_DECL:
11021 case RESULT_DECL:
11022 /* None of these have subtrees other than those already walked
11023 above. */
11024 break;
11025
11026 case TREE_LIST:
11027 WALK_SUBTREE (TREE_VALUE (*tp));
11028 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11029 break;
11030
11031 case TREE_VEC:
11032 {
11033 int len = TREE_VEC_LENGTH (*tp);
11034
11035 if (len == 0)
11036 break;
11037
11038 /* Walk all elements but the first. */
11039 while (--len)
11040 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11041
11042 /* Now walk the first one as a tail call. */
11043 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11044 }
11045
11046 case COMPLEX_CST:
11047 WALK_SUBTREE (TREE_REALPART (*tp));
11048 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11049
11050 case CONSTRUCTOR:
11051 {
11052 unsigned HOST_WIDE_INT idx;
11053 constructor_elt *ce;
11054
11055 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11056 idx++)
11057 WALK_SUBTREE (ce->value);
11058 }
11059 break;
11060
11061 case SAVE_EXPR:
11062 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11063
11064 case BIND_EXPR:
11065 {
11066 tree decl;
11067 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11068 {
11069 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11070 into declarations that are just mentioned, rather than
11071 declared; they don't really belong to this part of the tree.
11072 And, we can see cycles: the initializer for a declaration
11073 can refer to the declaration itself. */
11074 WALK_SUBTREE (DECL_INITIAL (decl));
11075 WALK_SUBTREE (DECL_SIZE (decl));
11076 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11077 }
11078 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11079 }
11080
11081 case STATEMENT_LIST:
11082 {
11083 tree_stmt_iterator i;
11084 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11085 WALK_SUBTREE (*tsi_stmt_ptr (i));
11086 }
11087 break;
11088
11089 case OMP_CLAUSE:
11090 switch (OMP_CLAUSE_CODE (*tp))
11091 {
11092 case OMP_CLAUSE_PRIVATE:
11093 case OMP_CLAUSE_SHARED:
11094 case OMP_CLAUSE_FIRSTPRIVATE:
11095 case OMP_CLAUSE_COPYIN:
11096 case OMP_CLAUSE_COPYPRIVATE:
11097 case OMP_CLAUSE_FINAL:
11098 case OMP_CLAUSE_IF:
11099 case OMP_CLAUSE_NUM_THREADS:
11100 case OMP_CLAUSE_SCHEDULE:
11101 case OMP_CLAUSE_UNIFORM:
11102 case OMP_CLAUSE_DEPEND:
11103 case OMP_CLAUSE_NUM_TEAMS:
11104 case OMP_CLAUSE_THREAD_LIMIT:
11105 case OMP_CLAUSE_DEVICE:
11106 case OMP_CLAUSE_DIST_SCHEDULE:
11107 case OMP_CLAUSE_SAFELEN:
11108 case OMP_CLAUSE_SIMDLEN:
11109 case OMP_CLAUSE__LOOPTEMP_:
11110 case OMP_CLAUSE__SIMDUID_:
11111 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11112 /* FALLTHRU */
11113
11114 case OMP_CLAUSE_NOWAIT:
11115 case OMP_CLAUSE_ORDERED:
11116 case OMP_CLAUSE_DEFAULT:
11117 case OMP_CLAUSE_UNTIED:
11118 case OMP_CLAUSE_MERGEABLE:
11119 case OMP_CLAUSE_PROC_BIND:
11120 case OMP_CLAUSE_INBRANCH:
11121 case OMP_CLAUSE_NOTINBRANCH:
11122 case OMP_CLAUSE_FOR:
11123 case OMP_CLAUSE_PARALLEL:
11124 case OMP_CLAUSE_SECTIONS:
11125 case OMP_CLAUSE_TASKGROUP:
11126 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11127
11128 case OMP_CLAUSE_LASTPRIVATE:
11129 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11130 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11131 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11132
11133 case OMP_CLAUSE_COLLAPSE:
11134 {
11135 int i;
11136 for (i = 0; i < 3; i++)
11137 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11138 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11139 }
11140
11141 case OMP_CLAUSE_LINEAR:
11142 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11143 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11144 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11145 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11146
11147 case OMP_CLAUSE_ALIGNED:
11148 case OMP_CLAUSE_FROM:
11149 case OMP_CLAUSE_TO:
11150 case OMP_CLAUSE_MAP:
11151 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11152 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11153 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11154
11155 case OMP_CLAUSE_REDUCTION:
11156 {
11157 int i;
11158 for (i = 0; i < 4; i++)
11159 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11160 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11161 }
11162
11163 default:
11164 gcc_unreachable ();
11165 }
11166 break;
11167
11168 case TARGET_EXPR:
11169 {
11170 int i, len;
11171
11172 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11173 But we only want to walk it once. */
11174 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11175 for (i = 0; i < len; ++i)
11176 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11177 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11178 }
11179
11180 case DECL_EXPR:
11181 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11182 defining. We only want to walk into these fields of a type in this
11183 case and not in the general case of a mere reference to the type.
11184
11185 The criterion is as follows: if the field can be an expression, it
11186 must be walked only here. This should be in keeping with the fields
11187 that are directly gimplified in gimplify_type_sizes in order for the
11188 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11189 variable-sized types.
11190
11191 Note that DECLs get walked as part of processing the BIND_EXPR. */
11192 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11193 {
11194 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11195 if (TREE_CODE (*type_p) == ERROR_MARK)
11196 return NULL_TREE;
11197
11198 /* Call the function for the type. See if it returns anything or
11199 doesn't want us to continue. If we are to continue, walk both
11200 the normal fields and those for the declaration case. */
11201 result = (*func) (type_p, &walk_subtrees, data);
11202 if (result || !walk_subtrees)
11203 return result;
11204
11205 /* But do not walk a pointed-to type since it may itself need to
11206 be walked in the declaration case if it isn't anonymous. */
11207 if (!POINTER_TYPE_P (*type_p))
11208 {
11209 result = walk_type_fields (*type_p, func, data, pset, lh);
11210 if (result)
11211 return result;
11212 }
11213
11214 /* If this is a record type, also walk the fields. */
11215 if (RECORD_OR_UNION_TYPE_P (*type_p))
11216 {
11217 tree field;
11218
11219 for (field = TYPE_FIELDS (*type_p); field;
11220 field = DECL_CHAIN (field))
11221 {
11222 /* We'd like to look at the type of the field, but we can
11223 easily get infinite recursion. So assume it's pointed
11224 to elsewhere in the tree. Also, ignore things that
11225 aren't fields. */
11226 if (TREE_CODE (field) != FIELD_DECL)
11227 continue;
11228
11229 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11230 WALK_SUBTREE (DECL_SIZE (field));
11231 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11232 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11233 WALK_SUBTREE (DECL_QUALIFIER (field));
11234 }
11235 }
11236
11237 /* Same for scalar types. */
11238 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11239 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11240 || TREE_CODE (*type_p) == INTEGER_TYPE
11241 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11242 || TREE_CODE (*type_p) == REAL_TYPE)
11243 {
11244 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11245 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11246 }
11247
11248 WALK_SUBTREE (TYPE_SIZE (*type_p));
11249 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11250 }
11251 /* FALLTHRU */
11252
11253 default:
11254 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11255 {
11256 int i, len;
11257
11258 /* Walk over all the sub-trees of this operand. */
11259 len = TREE_OPERAND_LENGTH (*tp);
11260
11261 /* Go through the subtrees. We need to do this in forward order so
11262 that the scope of a FOR_EXPR is handled properly. */
11263 if (len)
11264 {
11265 for (i = 0; i < len - 1; ++i)
11266 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11267 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11268 }
11269 }
11270 /* If this is a type, walk the needed fields in the type. */
11271 else if (TYPE_P (*tp))
11272 return walk_type_fields (*tp, func, data, pset, lh);
11273 break;
11274 }
11275
11276 /* We didn't find what we were looking for. */
11277 return NULL_TREE;
11278
11279 #undef WALK_SUBTREE_TAIL
11280 }
11281 #undef WALK_SUBTREE
11282
11283 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11284
11285 tree
11286 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11287 walk_tree_lh lh)
11288 {
11289 tree result;
11290 struct pointer_set_t *pset;
11291
11292 pset = pointer_set_create ();
11293 result = walk_tree_1 (tp, func, data, pset, lh);
11294 pointer_set_destroy (pset);
11295 return result;
11296 }
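
/* Illustrative usage sketch (not part of the original file): a minimal
   walk_tree_fn callback that counts every node visited, driven through the
   duplicate-free walker above.  Returning NULL_TREE from the callback means
   "keep walking"; any non-NULL result stops the walk and is propagated back
   to the caller.  The helper name count_nodes_r and the variable expr are
   hypothetical.

     static tree
     count_nodes_r (tree *tp ATTRIBUTE_UNUSED,
                    int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned n = 0;
     walk_tree_without_duplicates_1 (&expr, count_nodes_r, &n, NULL);  */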
11297
11298
11299 tree
11300 tree_block (tree t)
11301 {
11302 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11303
11304 if (IS_EXPR_CODE_CLASS (c))
11305 return LOCATION_BLOCK (t->exp.locus);
11306 gcc_unreachable ();
11307 return NULL;
11308 }
11309
11310 void
11311 tree_set_block (tree t, tree b)
11312 {
11313 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11314
11315 if (IS_EXPR_CODE_CLASS (c))
11316 {
11317 if (b)
11318 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11319 else
11320 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11321 }
11322 else
11323 gcc_unreachable ();
11324 }
11325
11326 /* Create a nameless artificial label and put it in the current
11327 function context. The label has a location of LOC. Returns the
11328 newly created label. */
11329
11330 tree
11331 create_artificial_label (location_t loc)
11332 {
11333 tree lab = build_decl (loc,
11334 LABEL_DECL, NULL_TREE, void_type_node);
11335
11336 DECL_ARTIFICIAL (lab) = 1;
11337 DECL_IGNORED_P (lab) = 1;
11338 DECL_CONTEXT (lab) = current_function_decl;
11339 return lab;
11340 }
11341
11342 /* Given a tree, try to return a useful variable name that we can use
11343 to prefix a temporary that is being assigned the value of the tree.
11344 E.g., given <temp> = &A, return A. */
11345
11346 const char *
11347 get_name (tree t)
11348 {
11349 tree stripped_decl;
11350
11351 stripped_decl = t;
11352 STRIP_NOPS (stripped_decl);
11353 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11354 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11355 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11356 {
11357 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11358 if (!name)
11359 return NULL;
11360 return IDENTIFIER_POINTER (name);
11361 }
11362 else
11363 {
11364 switch (TREE_CODE (stripped_decl))
11365 {
11366 case ADDR_EXPR:
11367 return get_name (TREE_OPERAND (stripped_decl, 0));
11368 default:
11369 return NULL;
11370 }
11371 }
11372 }
11373
11374 /* Return true if FNTYPE has a variable argument list. */
11375
11376 bool
11377 stdarg_p (const_tree fntype)
11378 {
11379 function_args_iterator args_iter;
11380 tree n = NULL_TREE, t;
11381
11382 if (!fntype)
11383 return false;
11384
11385 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11386 {
11387 n = t;
11388 }
11389
11390 return n != NULL_TREE && n != void_type_node;
11391 }
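
/* Worked example (illustrative, not part of the original file): for the C
   declarations

     int f (int, ...);     argument type list: int           (no trailing void)
     int g (int);          argument type list: int, void
     int h ();             argument type list: empty (unprototyped)

   the loop above leaves N as the last iterated argument type, so stdarg_p
   returns true only for f; g ends in void_type_node and h leaves N as
   NULL_TREE.  */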
11392
11393 /* Return true if FNTYPE has a prototype. */
11394
11395 bool
11396 prototype_p (tree fntype)
11397 {
11398 tree t;
11399
11400 gcc_assert (fntype != NULL_TREE);
11401
11402 t = TYPE_ARG_TYPES (fntype);
11403 return (t != NULL_TREE);
11404 }
11405
11406 /* If BLOCK is inlined from an __attribute__((__artificial__))
11407 routine, return a pointer to the location from which it was
11408 called. */
11409 location_t *
11410 block_nonartificial_location (tree block)
11411 {
11412 location_t *ret = NULL;
11413
11414 while (block && TREE_CODE (block) == BLOCK
11415 && BLOCK_ABSTRACT_ORIGIN (block))
11416 {
11417 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11418
11419 while (TREE_CODE (ao) == BLOCK
11420 && BLOCK_ABSTRACT_ORIGIN (ao)
11421 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11422 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11423
11424 if (TREE_CODE (ao) == FUNCTION_DECL)
11425 {
11426 /* If AO is an artificial inline, point RET to the
11427 call site locus at which it has been inlined and continue
11428 the loop, in case AO's caller is also an artificial
11429 inline. */
11430 if (DECL_DECLARED_INLINE_P (ao)
11431 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11432 ret = &BLOCK_SOURCE_LOCATION (block);
11433 else
11434 break;
11435 }
11436 else if (TREE_CODE (ao) != BLOCK)
11437 break;
11438
11439 block = BLOCK_SUPERCONTEXT (block);
11440 }
11441 return ret;
11442 }
11443
11444
11445 /* If EXP is inlined from an __attribute__((__artificial__))
11446 function, return the location of the original call expression. */
11447
11448 location_t
11449 tree_nonartificial_location (tree exp)
11450 {
11451 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11452
11453 if (loc)
11454 return *loc;
11455 else
11456 return EXPR_LOCATION (exp);
11457 }
11458
11459
11460 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11461 and TARGET_OPTION_NODE nodes. */
11462
11463 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11464
11465 static hashval_t
11466 cl_option_hash_hash (const void *x)
11467 {
11468 const_tree const t = (const_tree) x;
11469 const char *p;
11470 size_t i;
11471 size_t len = 0;
11472 hashval_t hash = 0;
11473
11474 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11475 {
11476 p = (const char *)TREE_OPTIMIZATION (t);
11477 len = sizeof (struct cl_optimization);
11478 }
11479
11480 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11481 {
11482 p = (const char *)TREE_TARGET_OPTION (t);
11483 len = sizeof (struct cl_target_option);
11484 }
11485
11486 else
11487 gcc_unreachable ();
11488
11489 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11490 something else. */
11491 for (i = 0; i < len; i++)
11492 if (p[i])
11493 hash = (hash << 4) ^ ((i << 2) | p[i]);
11494
11495 return hash;
11496 }
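
/* Worked example (illustrative, not part of the original file): if the
   underlying option structure had exactly two nonzero bytes, byte 0 with
   value 1 and byte 3 with value 1, the loop above would compute

     i = 0:  hash = (0 << 4) ^ ((0 << 2) | 1) = 1
     i = 3:  hash = (1 << 4) ^ ((3 << 2) | 1) = 16 ^ 13 = 29

   so the result is 29.  Zero bytes are skipped and do not perturb the
   hash.  */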
11497
11498 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11499 TARGET_OPTION_NODE tree node) is the same as that given by *Y, which is
11500 the same kind of node. */
11501
11502 static int
11503 cl_option_hash_eq (const void *x, const void *y)
11504 {
11505 const_tree const xt = (const_tree) x;
11506 const_tree const yt = (const_tree) y;
11507 const char *xp;
11508 const char *yp;
11509 size_t len;
11510
11511 if (TREE_CODE (xt) != TREE_CODE (yt))
11512 return 0;
11513
11514 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11515 {
11516 xp = (const char *)TREE_OPTIMIZATION (xt);
11517 yp = (const char *)TREE_OPTIMIZATION (yt);
11518 len = sizeof (struct cl_optimization);
11519 }
11520
11521 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11522 {
11523 xp = (const char *)TREE_TARGET_OPTION (xt);
11524 yp = (const char *)TREE_TARGET_OPTION (yt);
11525 len = sizeof (struct cl_target_option);
11526 }
11527
11528 else
11529 gcc_unreachable ();
11530
11531 return (memcmp (xp, yp, len) == 0);
11532 }
11533
11534 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11535
11536 tree
11537 build_optimization_node (struct gcc_options *opts)
11538 {
11539 tree t;
11540 void **slot;
11541
11542 /* Use the cache of optimization nodes. */
11543
11544 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11545 opts);
11546
11547 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11548 t = (tree) *slot;
11549 if (!t)
11550 {
11551 /* Insert this one into the hash table. */
11552 t = cl_optimization_node;
11553 *slot = t;
11554
11555 /* Make a new node for next time round. */
11556 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11557 }
11558
11559 return t;
11560 }
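
/* Usage sketch (illustrative, not part of the original file): the usual
   pattern is to snapshot the current command-line state and attach the
   shared node to a function declaration, for example

     tree opt = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = opt;

   where fndecl is a hypothetical FUNCTION_DECL.  Because identical option
   sets hash to the same node, all functions compiled with the same settings
   share one OPTIMIZATION_NODE.  */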
11561
11562 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11563
11564 tree
11565 build_target_option_node (struct gcc_options *opts)
11566 {
11567 tree t;
11568 void **slot;
11569
11570 /* Use the cache of target option nodes. */
11571
11572 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11573 opts);
11574
11575 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11576 t = (tree) *slot;
11577 if (!t)
11578 {
11579 /* Insert this one into the hash table. */
11580 t = cl_target_option_node;
11581 *slot = t;
11582
11583 /* Make a new node for next time round. */
11584 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11585 }
11586
11587 return t;
11588 }
11589
11590 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11591 Called through htab_traverse. */
11592
11593 static int
11594 prepare_target_option_node_for_pch (void **slot, void *)
11595 {
11596 tree node = (tree) *slot;
11597 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11598 TREE_TARGET_GLOBALS (node) = NULL;
11599 return 1;
11600 }
11601
11602 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11603 so that they aren't saved during PCH writing. */
11604
11605 void
11606 prepare_target_option_nodes_for_pch (void)
11607 {
11608 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11609 NULL);
11610 }
11611
11612 /* Determine the "ultimate origin" of a block. The block may be an inlined
11613 instance of an inlined instance of a block which is local to an inline
11614 function, so we have to trace all of the way back through the origin chain
11615 to find out what sort of node actually served as the original seed for the
11616 given block. */
11617
11618 tree
11619 block_ultimate_origin (const_tree block)
11620 {
11621 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11622
11623 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11624 nodes in the function to point to themselves; ignore that if
11625 we're trying to output the abstract instance of this function. */
11626 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11627 return NULL_TREE;
11628
11629 if (immediate_origin == NULL_TREE)
11630 return NULL_TREE;
11631 else
11632 {
11633 tree ret_val;
11634 tree lookahead = immediate_origin;
11635
11636 do
11637 {
11638 ret_val = lookahead;
11639 lookahead = (TREE_CODE (ret_val) == BLOCK
11640 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11641 }
11642 while (lookahead != NULL && lookahead != ret_val);
11643
11644 /* The block's abstract origin chain may not be the *ultimate* origin of
11645 the block. It could lead to a DECL that has an abstract origin set.
11646 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11647 will give us if it has one). Note that DECL's abstract origins are
11648 supposed to be the most distant ancestor (or so decl_ultimate_origin
11649 claims), so we don't need to loop following the DECL origins. */
11650 if (DECL_P (ret_val))
11651 return DECL_ORIGIN (ret_val);
11652
11653 return ret_val;
11654 }
11655 }
11656
11657 /* Return true iff conversion in EXP generates no instruction. Mark
11658 it inline so that we fully inline into the stripping functions even
11659 though we have two uses of this function. */
11660
11661 static inline bool
11662 tree_nop_conversion (const_tree exp)
11663 {
11664 tree outer_type, inner_type;
11665
11666 if (!CONVERT_EXPR_P (exp)
11667 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11668 return false;
11669 if (TREE_OPERAND (exp, 0) == error_mark_node)
11670 return false;
11671
11672 outer_type = TREE_TYPE (exp);
11673 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11674
11675 if (!inner_type)
11676 return false;
11677
11678 /* Use precision rather than machine mode when we can, which gives
11679 the correct answer even for submode (bit-field) types. */
11680 if ((INTEGRAL_TYPE_P (outer_type)
11681 || POINTER_TYPE_P (outer_type)
11682 || TREE_CODE (outer_type) == OFFSET_TYPE)
11683 && (INTEGRAL_TYPE_P (inner_type)
11684 || POINTER_TYPE_P (inner_type)
11685 || TREE_CODE (inner_type) == OFFSET_TYPE))
11686 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11687
11688 /* Otherwise fall back on comparing machine modes (e.g. for
11689 aggregate types, floats). */
11690 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11691 }
11692
11693 /* Return true iff conversion in EXP generates no instruction. Don't
11694 consider conversions changing the signedness. */
11695
11696 static bool
11697 tree_sign_nop_conversion (const_tree exp)
11698 {
11699 tree outer_type, inner_type;
11700
11701 if (!tree_nop_conversion (exp))
11702 return false;
11703
11704 outer_type = TREE_TYPE (exp);
11705 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11706
11707 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11708 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11709 }
11710
11711 /* Strip conversions from EXP according to tree_nop_conversion and
11712 return the resulting expression. */
11713
11714 tree
11715 tree_strip_nop_conversions (tree exp)
11716 {
11717 while (tree_nop_conversion (exp))
11718 exp = TREE_OPERAND (exp, 0);
11719 return exp;
11720 }
11721
11722 /* Strip conversions from EXP according to tree_sign_nop_conversion
11723 and return the resulting expression. */
11724
11725 tree
11726 tree_strip_sign_nop_conversions (tree exp)
11727 {
11728 while (tree_sign_nop_conversion (exp))
11729 exp = TREE_OPERAND (exp, 0);
11730 return exp;
11731 }
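
/* Worked example (illustrative, not part of the original file): with
   "int x;" and the expression (int) (unsigned int) x, each conversion
   preserves the precision, so tree_strip_nop_conversions peels both off
   and returns x.  tree_strip_sign_nop_conversions, by contrast, stops at
   the outermost conversion because int and unsigned int differ in
   signedness, and returns the expression unchanged.  */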
11732
11733 /* Strip any floating point extensions from EXP and return the result. */
11734 tree
11735 strip_float_extensions (tree exp)
11736 {
11737 tree sub, expt, subt;
11738
11739 /* For a floating point constant, look up the narrowest type that can hold
11740 it properly and handle it like (type)(narrowest_type)constant.
11741 This way we can optimize, for instance, a=a*2.0 where "a" is float
11742 but 2.0 is a double constant. */
11743 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11744 {
11745 REAL_VALUE_TYPE orig;
11746 tree type = NULL;
11747
11748 orig = TREE_REAL_CST (exp);
11749 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11750 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11751 type = float_type_node;
11752 else if (TYPE_PRECISION (TREE_TYPE (exp))
11753 > TYPE_PRECISION (double_type_node)
11754 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11755 type = double_type_node;
11756 if (type)
11757 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11758 }
11759
11760 if (!CONVERT_EXPR_P (exp))
11761 return exp;
11762
11763 sub = TREE_OPERAND (exp, 0);
11764 subt = TREE_TYPE (sub);
11765 expt = TREE_TYPE (exp);
11766
11767 if (!FLOAT_TYPE_P (subt))
11768 return exp;
11769
11770 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11771 return exp;
11772
11773 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11774 return exp;
11775
11776 return strip_float_extensions (sub);
11777 }
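
/* Worked examples (illustrative, not part of the original file): with
   "float f;", the expression (double) f strips down to plain f, since the
   inner float type is no wider than the outer double.  The double REAL_CST
   2.0 strips to a float_type_node constant 2.0 because the truncation to
   float is exact, whereas 0.1 as a double is returned unchanged since it
   has no exact float representation.  */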
11778
11779 /* Strip out all handled components that produce invariant
11780 offsets. */
11781
11782 const_tree
11783 strip_invariant_refs (const_tree op)
11784 {
11785 while (handled_component_p (op))
11786 {
11787 switch (TREE_CODE (op))
11788 {
11789 case ARRAY_REF:
11790 case ARRAY_RANGE_REF:
11791 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11792 || TREE_OPERAND (op, 2) != NULL_TREE
11793 || TREE_OPERAND (op, 3) != NULL_TREE)
11794 return NULL;
11795 break;
11796
11797 case COMPONENT_REF:
11798 if (TREE_OPERAND (op, 2) != NULL_TREE)
11799 return NULL;
11800 break;
11801
11802 default:;
11803 }
11804 op = TREE_OPERAND (op, 0);
11805 }
11806
11807 return op;
11808 }
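
/* Worked example (illustrative, not part of the original file): for a
   reference such as a.b[2] (constant index, no variable offsets) the loop
   above strips the ARRAY_REF and the COMPONENT_REF and returns the base
   object a.  For a.b[i] with a non-constant index i, the ARRAY_REF fails
   the is_gimple_constant test and NULL is returned instead.  */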
11809
11810 static GTY(()) tree gcc_eh_personality_decl;
11811
11812 /* Return the GCC personality function decl. */
11813
11814 tree
11815 lhd_gcc_personality (void)
11816 {
11817 if (!gcc_eh_personality_decl)
11818 gcc_eh_personality_decl = build_personality_function ("gcc");
11819 return gcc_eh_personality_decl;
11820 }
11821
11822 /* TARGET is a call target of a GIMPLE call statement
11823 (obtained by gimple_call_fn). Return true if it is an
11824 OBJ_TYPE_REF representing a virtual call to a C++ method.
11825 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11826 through a cast, where the middle-end devirtualization machinery
11827 can't apply.) */
11828
11829 bool
11830 virtual_method_call_p (tree target)
11831 {
11832 if (TREE_CODE (target) != OBJ_TYPE_REF)
11833 return false;
11834 target = TREE_TYPE (target);
11835 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11836 target = TREE_TYPE (target);
11837 if (TREE_CODE (target) == FUNCTION_TYPE)
11838 return false;
11839 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11840 return true;
11841 }
11842
11843 /* REF is an OBJ_TYPE_REF; return the class the reference corresponds to. */
11844
11845 tree
11846 obj_type_ref_class (tree ref)
11847 {
11848 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11849 ref = TREE_TYPE (ref);
11850 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11851 ref = TREE_TYPE (ref);
11852 /* We look for the type THIS points to. ObjC also builds
11853 OBJ_TYPE_REF with non-method calls; their first parameter
11854 ID, however, also corresponds to the class type. */
11855 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11856 || TREE_CODE (ref) == FUNCTION_TYPE);
11857 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11858 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11859 return TREE_TYPE (ref);
11860 }
11861
11862 /* Return true if T is in an anonymous namespace. */
11863
11864 bool
11865 type_in_anonymous_namespace_p (const_tree t)
11866 {
11867 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11868 builtin types; those have a NULL TYPE_CONTEXT. */
11869 if (!TYPE_CONTEXT (t))
11870 return false;
11871 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11872 }
11873
11874 /* Try to find a base info of BINFO that would have its field decl at offset
11875 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11876 found, return it; otherwise return NULL_TREE. */
11877
11878 tree
11879 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11880 {
11881 tree type = BINFO_TYPE (binfo);
11882
11883 while (true)
11884 {
11885 HOST_WIDE_INT pos, size;
11886 tree fld;
11887 int i;
11888
11889 if (types_same_for_odr (type, expected_type))
11890 return binfo;
11891 if (offset < 0)
11892 return NULL_TREE;
11893
11894 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11895 {
11896 if (TREE_CODE (fld) != FIELD_DECL)
11897 continue;
11898
11899 pos = int_bit_position (fld);
11900 size = tree_to_uhwi (DECL_SIZE (fld));
11901 if (pos <= offset && (pos + size) > offset)
11902 break;
11903 }
11904 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11905 return NULL_TREE;
11906
11907 if (!DECL_ARTIFICIAL (fld))
11908 {
11909 binfo = TYPE_BINFO (TREE_TYPE (fld));
11910 if (!binfo)
11911 return NULL_TREE;
11912 }
11913 /* Offset 0 indicates the primary base, whose vtable contents are
11914 represented in the binfo for the derived class. */
11915 else if (offset != 0)
11916 {
11917 tree base_binfo, binfo2 = binfo;
11918
11919 /* Find the BINFO corresponding to FLD. This is made a bit harder
11920 by the fact that with virtual inheritance we may need to walk down
11921 the non-virtual inheritance chain. */
11922 while (true)
11923 {
11924 tree containing_binfo = NULL, found_binfo = NULL;
11925 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11926 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11927 {
11928 found_binfo = base_binfo;
11929 break;
11930 }
11931 else
11932 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11933 - tree_to_shwi (BINFO_OFFSET (binfo)))
11934 * BITS_PER_UNIT < pos
11935 /* Rule out types with no virtual methods, or we can get confused
11936 here by zero-sized bases. */
11937 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11938 && (!containing_binfo
11939 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11940 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11941 containing_binfo = base_binfo;
11942 if (found_binfo)
11943 {
11944 binfo = found_binfo;
11945 break;
11946 }
11947 if (!containing_binfo)
11948 return NULL_TREE;
11949 binfo2 = containing_binfo;
11950 }
11951 }
11952
11953 type = TREE_TYPE (fld);
11954 offset -= pos;
11955 }
11956 }
11957
11958 /* Returns true if X is a typedef decl. */
11959
11960 bool
11961 is_typedef_decl (tree x)
11962 {
11963 return (x && TREE_CODE (x) == TYPE_DECL
11964 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11965 }
11966
11967 /* Returns true iff TYPE is a type variant created for a typedef. */
11968
11969 bool
11970 typedef_variant_p (tree type)
11971 {
11972 return is_typedef_decl (TYPE_NAME (type));
11973 }
11974
11975 /* Warn about a use of an identifier which was marked deprecated. */
11976 void
11977 warn_deprecated_use (tree node, tree attr)
11978 {
11979 const char *msg;
11980
11981 if (node == 0 || !warn_deprecated_decl)
11982 return;
11983
11984 if (!attr)
11985 {
11986 if (DECL_P (node))
11987 attr = DECL_ATTRIBUTES (node);
11988 else if (TYPE_P (node))
11989 {
11990 tree decl = TYPE_STUB_DECL (node);
11991 if (decl)
11992 attr = lookup_attribute ("deprecated",
11993 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11994 }
11995 }
11996
11997 if (attr)
11998 attr = lookup_attribute ("deprecated", attr);
11999
12000 if (attr)
12001 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12002 else
12003 msg = NULL;
12004
12005 if (DECL_P (node))
12006 {
12007 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12008 if (msg)
12009 warning (OPT_Wdeprecated_declarations,
12010 "%qD is deprecated (declared at %r%s:%d%R): %s",
12011 node, "locus", xloc.file, xloc.line, msg);
12012 else
12013 warning (OPT_Wdeprecated_declarations,
12014 "%qD is deprecated (declared at %r%s:%d%R)",
12015 node, "locus", xloc.file, xloc.line);
12016 }
12017 else if (TYPE_P (node))
12018 {
12019 tree what = NULL_TREE;
12020 tree decl = TYPE_STUB_DECL (node);
12021
12022 if (TYPE_NAME (node))
12023 {
12024 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12025 what = TYPE_NAME (node);
12026 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12027 && DECL_NAME (TYPE_NAME (node)))
12028 what = DECL_NAME (TYPE_NAME (node));
12029 }
12030
12031 if (decl)
12032 {
12033 expanded_location xloc
12034 = expand_location (DECL_SOURCE_LOCATION (decl));
12035 if (what)
12036 {
12037 if (msg)
12038 warning (OPT_Wdeprecated_declarations,
12039 "%qE is deprecated (declared at %r%s:%d%R): %s",
12040 what, "locus", xloc.file, xloc.line, msg);
12041 else
12042 warning (OPT_Wdeprecated_declarations,
12043 "%qE is deprecated (declared at %r%s:%d%R)",
12044 what, "locus", xloc.file, xloc.line);
12045 }
12046 else
12047 {
12048 if (msg)
12049 warning (OPT_Wdeprecated_declarations,
12050 "type is deprecated (declared at %r%s:%d%R): %s",
12051 "locus", xloc.file, xloc.line, msg);
12052 else
12053 warning (OPT_Wdeprecated_declarations,
12054 "type is deprecated (declared at %r%s:%d%R)",
12055 "locus", xloc.file, xloc.line);
12056 }
12057 }
12058 else
12059 {
12060 if (what)
12061 {
12062 if (msg)
12063 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12064 what, msg);
12065 else
12066 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12067 }
12068 else
12069 {
12070 if (msg)
12071 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12072 msg);
12073 else
12074 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12075 }
12076 }
12077 }
12078 }
12079
12080 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12081 somewhere in it. */
12082
12083 bool
12084 contains_bitfld_component_ref_p (const_tree ref)
12085 {
12086 while (handled_component_p (ref))
12087 {
12088 if (TREE_CODE (ref) == COMPONENT_REF
12089 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12090 return true;
12091 ref = TREE_OPERAND (ref, 0);
12092 }
12093
12094 return false;
12095 }
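
/* Worked example (illustrative, not part of the original file): given
   "struct s { int a : 3; int b; } x;", a reference to x.a is a
   COMPONENT_REF whose FIELD_DECL has DECL_BIT_FIELD set, so this function
   returns true; a reference to x.b returns false.  */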
12096
12097 /* Try to determine whether a TRY_CATCH expression can fall through.
12098 This is a subroutine of block_may_fallthru. */
12099
12100 static bool
12101 try_catch_may_fallthru (const_tree stmt)
12102 {
12103 tree_stmt_iterator i;
12104
12105 /* If the TRY block can fall through, the whole TRY_CATCH can
12106 fall through. */
12107 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12108 return true;
12109
12110 i = tsi_start (TREE_OPERAND (stmt, 1));
12111 switch (TREE_CODE (tsi_stmt (i)))
12112 {
12113 case CATCH_EXPR:
12114 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12115 catch expression and a body. The whole TRY_CATCH may fall
12116 through iff any of the catch bodies falls through. */
12117 for (; !tsi_end_p (i); tsi_next (&i))
12118 {
12119 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12120 return true;
12121 }
12122 return false;
12123
12124 case EH_FILTER_EXPR:
12125 /* The exception filter expression only matters if there is an
12126 exception. If the exception does not match EH_FILTER_TYPES,
12127 we will execute EH_FILTER_FAILURE, and we will fall through
12128 if that falls through. If the exception does match
12129 EH_FILTER_TYPES, the stack unwinder will continue up the
12130 stack, so we will not fall through. We don't know whether we
12131 will throw an exception which matches EH_FILTER_TYPES or not,
12132 so we just ignore EH_FILTER_TYPES and assume that we might
12133 throw an exception which doesn't match. */
12134 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12135
12136 default:
12137 /* This case represents statements to be executed when an
12138 exception occurs. Those statements are implicitly followed
12139 by a RESX statement to resume execution after the exception.
12140 So in this case the TRY_CATCH never falls through. */
12141 return false;
12142 }
12143 }
12144
12145 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12146 need not be 100% accurate; simply be conservative and return true if we
12147 don't know. This is used only to avoid stupidly generating extra code.
12148 If we're wrong, we'll just delete the extra code later. */
12149
12150 bool
12151 block_may_fallthru (const_tree block)
12152 {
12153 /* This CONST_CAST is okay because expr_last returns its argument
12154 unmodified and we assign it to a const_tree. */
12155 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12156
12157 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12158 {
12159 case GOTO_EXPR:
12160 case RETURN_EXPR:
12161 /* Easy cases. If the last statement of the block implies
12162 control transfer, then we can't fall through. */
12163 return false;
12164
12165 case SWITCH_EXPR:
12166 /* If SWITCH_LABELS is set, this is lowered, and represents a
12167 branch to a selected label and hence cannot fall through.
12168 Otherwise SWITCH_BODY is set, and the switch can fall
12169 through. */
12170 return SWITCH_LABELS (stmt) == NULL_TREE;
12171
12172 case COND_EXPR:
12173 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12174 return true;
12175 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12176
12177 case BIND_EXPR:
12178 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12179
12180 case TRY_CATCH_EXPR:
12181 return try_catch_may_fallthru (stmt);
12182
12183 case TRY_FINALLY_EXPR:
12184 /* The finally clause is always executed after the try clause,
12185 so if it does not fall through, then the try-finally will not
12186 fall through. Otherwise, if the try clause does not fall
12187 through, then when the finally clause falls through it will
12188 resume execution wherever the try clause was going. So the
12189 whole try-finally will only fall through if both the try
12190 clause and the finally clause fall through. */
12191 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12192 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12193
12194 case MODIFY_EXPR:
12195 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12196 stmt = TREE_OPERAND (stmt, 1);
12197 else
12198 return true;
12199 /* FALLTHRU */
12200
12201 case CALL_EXPR:
12202 /* Functions that do not return do not fall through. */
12203 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12204
12205 case CLEANUP_POINT_EXPR:
12206 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12207
12208 case TARGET_EXPR:
12209 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12210
12211 case ERROR_MARK:
12212 return true;
12213
12214 default:
12215 return lang_hooks.block_may_fallthru (stmt);
12216 }
12217 }
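
/* Worked examples (illustrative, not part of the original file): a block
   whose last statement is "return 0;" cannot fall through (RETURN_EXPR),
   and neither can one ending in a call to a noreturn function such as
   abort (), because ECF_NORETURN is set on the call.  A block ending in a
   simple assignment such as "x = y + 1;" may fall through, so true is
   returned for it.  */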
12218
12219 /* True if we are using EH to handle cleanups. */
12220 static bool using_eh_for_cleanups_flag = false;
12221
12222 /* This routine is called from front ends to indicate that EH should be used for
12223 cleanups. */
12224 void
12225 using_eh_for_cleanups (void)
12226 {
12227 using_eh_for_cleanups_flag = true;
12228 }
12229
12230 /* Query whether EH is used for cleanups. */
12231 bool
12232 using_eh_for_cleanups_p (void)
12233 {
12234 return using_eh_for_cleanups_flag;
12235 }
12236
12237 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12238 const char *
12239 get_tree_code_name (enum tree_code code)
12240 {
12241 const char *invalid = "<invalid tree code>";
12242
12243 if (code >= MAX_TREE_CODES)
12244 return invalid;
12245
12246 return tree_code_name[code];
12247 }
12248
12249 /* Drops the TREE_OVERFLOW flag from T. */
12250
12251 tree
12252 drop_tree_overflow (tree t)
12253 {
12254 gcc_checking_assert (TREE_OVERFLOW (t));
12255
12256 /* For tree codes with a sharing machinery re-build the result. */
12257 if (TREE_CODE (t) == INTEGER_CST)
12258 return wide_int_to_tree (TREE_TYPE (t), t);
12259
12260 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12261 and drop the flag. */
12262 t = copy_node (t);
12263 TREE_OVERFLOW (t) = 0;
12264 return t;
12265 }
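
/* Usage sketch (illustrative, not part of the original file): callers
   normally test the flag first, since the function asserts that it is set,
   for example

     if (TREE_OVERFLOW (val))
       val = drop_tree_overflow (val);

   where val is a hypothetical constant tree.  An INTEGER_CST goes back
   through the sharing machinery via wide_int_to_tree; other constants are
   copied, so a shared original keeps its overflow flag.  */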
12266
12267 /* Given a memory reference expression T, return its base address.
12268 The base address of a memory reference expression is the main
12269 object being referenced. For instance, the base address for
12270 'array[i].fld[j]' is 'array'. You can think of this as stripping
12271 away the offset part from a memory address.
12272
12273 This function calls handled_component_p to strip away all the inner
12274 parts of the memory reference until it reaches the base object. */
12275
12276 tree
12277 get_base_address (tree t)
12278 {
12279 while (handled_component_p (t))
12280 t = TREE_OPERAND (t, 0);
12281
12282 if ((TREE_CODE (t) == MEM_REF
12283 || TREE_CODE (t) == TARGET_MEM_REF)
12284 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12285 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12286
12287 /* ??? Either the alias oracle or all callers need to properly deal
12288 with WITH_SIZE_EXPRs before we can look through those. */
12289 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12290 return NULL_TREE;
12291
12292 return t;
12293 }
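
/* Worked example (illustrative, not part of the original file): for a
   lowered reference of the form MEM_REF (&a, 4), whose address operand is
   an ADDR_EXPR, the ADDR_EXPR is looked through and the variable a itself
   is returned as the base, rather than the MEM_REF node.  */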
12294
12295 #include "gt-tree.h"