1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "gimple.h"
56 #include "gimple-iterator.h"
57 #include "gimplify.h"
58 #include "gimple-ssa.h"
59 #include "cgraph.h"
60 #include "tree-phinodes.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "expr.h"
64 #include "tree-dfa.h"
65 #include "params.h"
66 #include "pointer-set.h"
67 #include "tree-pass.h"
68 #include "langhooks-def.h"
69 #include "diagnostic.h"
70 #include "tree-diagnostic.h"
71 #include "tree-pretty-print.h"
72 #include "except.h"
73 #include "debug.h"
74 #include "intl.h"
75 #include "wide-int.h"
76
77 /* Tree code classes. */
78
79 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
80 #define END_OF_BASE_TREE_CODES tcc_exceptional,
81
82 const enum tree_code_class tree_code_type[] = {
83 #include "all-tree.def"
84 };
85
86 #undef DEFTREECODE
87 #undef END_OF_BASE_TREE_CODES
88
89 /* Table indexed by tree code giving number of expression
90 operands beyond the fixed part of the node structure.
91 Not used for types or decls. */
92
93 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
94 #define END_OF_BASE_TREE_CODES 0,
95
96 const unsigned char tree_code_length[] = {
97 #include "all-tree.def"
98 };
99
100 #undef DEFTREECODE
101 #undef END_OF_BASE_TREE_CODES
102
103 /* Names of tree components.
104 Used for printing out the tree and error messages. */
105 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
106 #define END_OF_BASE_TREE_CODES "@dummy",
107
108 static const char *const tree_code_name[] = {
109 #include "all-tree.def"
110 };
111
112 #undef DEFTREECODE
113 #undef END_OF_BASE_TREE_CODES
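
/* Illustrative sketch (not part of this file): a single DEFTREECODE entry
   in tree.def, for example

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes one row to each of the three tables above: tcc_binary goes
   into tree_code_type, 2 into tree_code_length and "plus_expr" into
   tree_code_name, all indexed by the PLUS_EXPR code.  */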
114
115 /* Each tree code class has an associated string representation.
116 These must correspond to the tree_code_class entries. */
117
118 const char *const tree_code_class_strings[] =
119 {
120 "exceptional",
121 "constant",
122 "type",
123 "declaration",
124 "reference",
125 "comparison",
126 "unary",
127 "binary",
128 "statement",
129 "vl_exp",
130 "expression"
131 };
132
133 /* obstack.[ch] explicitly declined to prototype this. */
134 extern int _obstack_allocated_p (struct obstack *h, void *obj);
135
136 /* Statistics-gathering stuff. */
137
138 static int tree_code_counts[MAX_TREE_CODES];
139 int tree_node_counts[(int) all_kinds];
140 int tree_node_sizes[(int) all_kinds];
141
142 /* Keep in sync with tree.h:enum tree_node_kind. */
143 static const char * const tree_node_kind_names[] = {
144 "decls",
145 "types",
146 "blocks",
147 "stmts",
148 "refs",
149 "exprs",
150 "constants",
151 "identifiers",
152 "vecs",
153 "binfos",
154 "ssa names",
155 "constructors",
156 "random kinds",
157 "lang_decl kinds",
158 "lang_type kinds",
159 "omp clauses",
160 };
161
162 /* Unique id for next decl created. */
163 static GTY(()) int next_decl_uid;
164 /* Unique id for next type created. */
165 static GTY(()) int next_type_uid = 1;
166 /* Unique id for next debug decl created. Use negative numbers,
167 to catch erroneous uses. */
168 static GTY(()) int next_debug_decl_uid;
169
170 /* Since we cannot rehash a type after it is in the table, we have to
171 keep the hash code. */
172
173 struct GTY(()) type_hash {
174 unsigned long hash;
175 tree type;
176 };
177
178 /* Initial size of the hash table (rounded to next prime). */
179 #define TYPE_HASH_INITIAL_SIZE 1000
180
181 /* Now here is the hash table. When recording a type, it is added to
182 the slot whose index is the hash code. Note that the hash table is
183 used for several kinds of types (function types, array types and
184 array index range types, for now). While all these live in the
185 same table, they are completely independent, and the hash code is
186 computed differently for each of these. */
187
188 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
189 htab_t type_hash_table;
190
191 /* Hash table and temporary node for larger integer const values. */
192 static GTY (()) tree int_cst_node;
193 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
194 htab_t int_cst_hash_table;
195
196 /* Hash table for optimization flags and target option flags. Use the same
197 hash table for both sets of options. Nodes for building the current
 198 optimization and target option nodes. The assumption is that most of the
 199 time the options created will already be in the hash table, so we avoid
 200 allocating and freeing up a node repeatedly. */
201 static GTY (()) tree cl_optimization_node;
202 static GTY (()) tree cl_target_option_node;
203 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
204 htab_t cl_option_hash_table;
205
206 /* General tree->tree mapping structure for use in hash tables. */
207
208
209 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
210 htab_t debug_expr_for_decl;
211
212 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
213 htab_t value_expr_for_decl;
214
215 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
216 htab_t debug_args_for_decl;
217
218 static GTY ((if_marked ("tree_priority_map_marked_p"),
219 param_is (struct tree_priority_map)))
220 htab_t init_priority_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static unsigned int type_hash_list (const_tree, hashval_t);
234 static unsigned int attribute_hash_list (const_tree, hashval_t);
235 static bool decls_same_for_odr (tree decl1, tree decl2);
236
237 tree global_trees[TI_MAX];
238 tree integer_types[itk_none];
239
240 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
241
242 /* Number of operands for each OpenMP clause. */
243 unsigned const char omp_clause_num_ops[] =
244 {
245 0, /* OMP_CLAUSE_ERROR */
246 1, /* OMP_CLAUSE_PRIVATE */
247 1, /* OMP_CLAUSE_SHARED */
248 1, /* OMP_CLAUSE_FIRSTPRIVATE */
249 2, /* OMP_CLAUSE_LASTPRIVATE */
250 4, /* OMP_CLAUSE_REDUCTION */
251 1, /* OMP_CLAUSE_COPYIN */
252 1, /* OMP_CLAUSE_COPYPRIVATE */
253 2, /* OMP_CLAUSE_LINEAR */
254 2, /* OMP_CLAUSE_ALIGNED */
255 1, /* OMP_CLAUSE_DEPEND */
256 1, /* OMP_CLAUSE_UNIFORM */
257 2, /* OMP_CLAUSE_FROM */
258 2, /* OMP_CLAUSE_TO */
259 2, /* OMP_CLAUSE_MAP */
260 1, /* OMP_CLAUSE__LOOPTEMP_ */
261 1, /* OMP_CLAUSE_IF */
262 1, /* OMP_CLAUSE_NUM_THREADS */
263 1, /* OMP_CLAUSE_SCHEDULE */
264 0, /* OMP_CLAUSE_NOWAIT */
265 0, /* OMP_CLAUSE_ORDERED */
266 0, /* OMP_CLAUSE_DEFAULT */
267 3, /* OMP_CLAUSE_COLLAPSE */
268 0, /* OMP_CLAUSE_UNTIED */
269 1, /* OMP_CLAUSE_FINAL */
270 0, /* OMP_CLAUSE_MERGEABLE */
271 1, /* OMP_CLAUSE_DEVICE */
272 1, /* OMP_CLAUSE_DIST_SCHEDULE */
273 0, /* OMP_CLAUSE_INBRANCH */
274 0, /* OMP_CLAUSE_NOTINBRANCH */
275 1, /* OMP_CLAUSE_NUM_TEAMS */
276 1, /* OMP_CLAUSE_THREAD_LIMIT */
277 0, /* OMP_CLAUSE_PROC_BIND */
278 1, /* OMP_CLAUSE_SAFELEN */
279 1, /* OMP_CLAUSE_SIMDLEN */
280 0, /* OMP_CLAUSE_FOR */
281 0, /* OMP_CLAUSE_PARALLEL */
282 0, /* OMP_CLAUSE_SECTIONS */
283 0, /* OMP_CLAUSE_TASKGROUP */
284 1, /* OMP_CLAUSE__SIMDUID_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_"
329 };
330
331
332 /* Return the tree node structure used by tree code CODE. */
333
334 static inline enum tree_node_structure_enum
335 tree_node_structure_for_code (enum tree_code code)
336 {
337 switch (TREE_CODE_CLASS (code))
338 {
339 case tcc_declaration:
340 {
341 switch (code)
342 {
343 case FIELD_DECL:
344 return TS_FIELD_DECL;
345 case PARM_DECL:
346 return TS_PARM_DECL;
347 case VAR_DECL:
348 return TS_VAR_DECL;
349 case LABEL_DECL:
350 return TS_LABEL_DECL;
351 case RESULT_DECL:
352 return TS_RESULT_DECL;
353 case DEBUG_EXPR_DECL:
354 return TS_DECL_WRTL;
355 case CONST_DECL:
356 return TS_CONST_DECL;
357 case TYPE_DECL:
358 return TS_TYPE_DECL;
359 case FUNCTION_DECL:
360 return TS_FUNCTION_DECL;
361 case TRANSLATION_UNIT_DECL:
362 return TS_TRANSLATION_UNIT_DECL;
363 default:
364 return TS_DECL_NON_COMMON;
365 }
366 }
367 case tcc_type:
368 return TS_TYPE_NON_COMMON;
369 case tcc_reference:
370 case tcc_comparison:
371 case tcc_unary:
372 case tcc_binary:
373 case tcc_expression:
374 case tcc_statement:
375 case tcc_vl_exp:
376 return TS_EXP;
377 default: /* tcc_constant and tcc_exceptional */
378 break;
379 }
380 switch (code)
381 {
382 /* tcc_constant cases. */
383 case INTEGER_CST: return TS_INT_CST;
384 case REAL_CST: return TS_REAL_CST;
385 case FIXED_CST: return TS_FIXED_CST;
386 case COMPLEX_CST: return TS_COMPLEX;
387 case VECTOR_CST: return TS_VECTOR;
388 case STRING_CST: return TS_STRING;
389 /* tcc_exceptional cases. */
390 case ERROR_MARK: return TS_COMMON;
391 case IDENTIFIER_NODE: return TS_IDENTIFIER;
392 case TREE_LIST: return TS_LIST;
393 case TREE_VEC: return TS_VEC;
394 case SSA_NAME: return TS_SSA_NAME;
395 case PLACEHOLDER_EXPR: return TS_COMMON;
396 case STATEMENT_LIST: return TS_STATEMENT_LIST;
397 case BLOCK: return TS_BLOCK;
398 case CONSTRUCTOR: return TS_CONSTRUCTOR;
399 case TREE_BINFO: return TS_BINFO;
400 case OMP_CLAUSE: return TS_OMP_CLAUSE;
401 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
402 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
403
404 default:
405 gcc_unreachable ();
406 }
407 }
408
409
410 /* Initialize tree_contains_struct to describe the hierarchy of tree
411 nodes. */
412
413 static void
414 initialize_tree_contains_struct (void)
415 {
416 unsigned i;
417
418 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
419 {
420 enum tree_code code;
421 enum tree_node_structure_enum ts_code;
422
423 code = (enum tree_code) i;
424 ts_code = tree_node_structure_for_code (code);
425
426 /* Mark the TS structure itself. */
427 tree_contains_struct[code][ts_code] = 1;
428
429 /* Mark all the structures that TS is derived from. */
430 switch (ts_code)
431 {
432 case TS_TYPED:
433 case TS_BLOCK:
434 MARK_TS_BASE (code);
435 break;
436
437 case TS_COMMON:
438 case TS_INT_CST:
439 case TS_REAL_CST:
440 case TS_FIXED_CST:
441 case TS_VECTOR:
442 case TS_STRING:
443 case TS_COMPLEX:
444 case TS_SSA_NAME:
445 case TS_CONSTRUCTOR:
446 case TS_EXP:
447 case TS_STATEMENT_LIST:
448 MARK_TS_TYPED (code);
449 break;
450
451 case TS_IDENTIFIER:
452 case TS_DECL_MINIMAL:
453 case TS_TYPE_COMMON:
454 case TS_LIST:
455 case TS_VEC:
456 case TS_BINFO:
457 case TS_OMP_CLAUSE:
458 case TS_OPTIMIZATION:
459 case TS_TARGET_OPTION:
460 MARK_TS_COMMON (code);
461 break;
462
463 case TS_TYPE_WITH_LANG_SPECIFIC:
464 MARK_TS_TYPE_COMMON (code);
465 break;
466
467 case TS_TYPE_NON_COMMON:
468 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
469 break;
470
471 case TS_DECL_COMMON:
472 MARK_TS_DECL_MINIMAL (code);
473 break;
474
475 case TS_DECL_WRTL:
476 case TS_CONST_DECL:
477 MARK_TS_DECL_COMMON (code);
478 break;
479
480 case TS_DECL_NON_COMMON:
481 MARK_TS_DECL_WITH_VIS (code);
482 break;
483
484 case TS_DECL_WITH_VIS:
485 case TS_PARM_DECL:
486 case TS_LABEL_DECL:
487 case TS_RESULT_DECL:
488 MARK_TS_DECL_WRTL (code);
489 break;
490
491 case TS_FIELD_DECL:
492 MARK_TS_DECL_COMMON (code);
493 break;
494
495 case TS_VAR_DECL:
496 MARK_TS_DECL_WITH_VIS (code);
497 break;
498
499 case TS_TYPE_DECL:
500 case TS_FUNCTION_DECL:
501 MARK_TS_DECL_NON_COMMON (code);
502 break;
503
504 case TS_TRANSLATION_UNIT_DECL:
505 MARK_TS_DECL_COMMON (code);
506 break;
507
508 default:
509 gcc_unreachable ();
510 }
511 }
512
513 /* Basic consistency checks for attributes used in fold. */
514 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
515 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
526 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
540 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
543 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
544 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
546 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
547 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
548 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
549 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
550 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
552 }
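
/* Illustrative sketch (an assumption, not part of GCC; the function name is
   made up): the table built above is normally consulted through the
   CODE_CONTAINS_STRUCT macro from tree.h.  */

static bool ATTRIBUTE_UNUSED
example_var_decl_has_visibility (void)
{
  /* True, because the TS_VAR_DECL case above marks TS_DECL_WITH_VIS and
     every structure it derives from for VAR_DECL.  */
  return CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS) != 0;
}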
553
554
555 /* Init tree.c. */
556
557 void
558 init_ttree (void)
559 {
560 /* Initialize the hash table of types. */
561 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
562 type_hash_eq, 0);
563
564 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
565 tree_decl_map_eq, 0);
566
567 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
568 tree_decl_map_eq, 0);
569 init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
570 tree_priority_map_eq, 0);
571
572 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
573 int_cst_hash_eq, NULL);
574
575 int_cst_node = make_int_cst (1, 1);
576
577 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
578 cl_option_hash_eq, NULL);
579
580 cl_optimization_node = make_node (OPTIMIZATION_NODE);
581 cl_target_option_node = make_node (TARGET_OPTION_NODE);
582
583 /* Initialize the tree_contains_struct array. */
584 initialize_tree_contains_struct ();
585 lang_hooks.init_ts ();
586 }
587
588 \f
589 /* The name of the object as the assembler will see it (but before any
590 translations made by ASM_OUTPUT_LABELREF). Often this is the same
591 as DECL_NAME. It is an IDENTIFIER_NODE. */
592 tree
593 decl_assembler_name (tree decl)
594 {
595 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
596 lang_hooks.set_decl_assembler_name (decl);
597 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
598 }
599
600 /* Compute the number of bytes occupied by a tree with code CODE.
601 This function cannot be used for nodes that have variable sizes,
602 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
603 size_t
604 tree_code_size (enum tree_code code)
605 {
606 switch (TREE_CODE_CLASS (code))
607 {
608 case tcc_declaration: /* A decl node */
609 {
610 switch (code)
611 {
612 case FIELD_DECL:
613 return sizeof (struct tree_field_decl);
614 case PARM_DECL:
615 return sizeof (struct tree_parm_decl);
616 case VAR_DECL:
617 return sizeof (struct tree_var_decl);
618 case LABEL_DECL:
619 return sizeof (struct tree_label_decl);
620 case RESULT_DECL:
621 return sizeof (struct tree_result_decl);
622 case CONST_DECL:
623 return sizeof (struct tree_const_decl);
624 case TYPE_DECL:
625 return sizeof (struct tree_type_decl);
626 case FUNCTION_DECL:
627 return sizeof (struct tree_function_decl);
628 case DEBUG_EXPR_DECL:
629 return sizeof (struct tree_decl_with_rtl);
630 default:
631 return sizeof (struct tree_decl_non_common);
632 }
633 }
634
635 case tcc_type: /* a type node */
636 return sizeof (struct tree_type_non_common);
637
638 case tcc_reference: /* a reference */
639 case tcc_expression: /* an expression */
640 case tcc_statement: /* an expression with side effects */
641 case tcc_comparison: /* a comparison expression */
642 case tcc_unary: /* a unary arithmetic expression */
643 case tcc_binary: /* a binary arithmetic expression */
644 return (sizeof (struct tree_exp)
645 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
646
647 case tcc_constant: /* a constant */
648 switch (code)
649 {
650 case INTEGER_CST: gcc_unreachable ();
651 case REAL_CST: return sizeof (struct tree_real_cst);
652 case FIXED_CST: return sizeof (struct tree_fixed_cst);
653 case COMPLEX_CST: return sizeof (struct tree_complex);
654 case VECTOR_CST: return sizeof (struct tree_vector);
655 case STRING_CST: gcc_unreachable ();
656 default:
657 return lang_hooks.tree_size (code);
658 }
659
660 case tcc_exceptional: /* something random, like an identifier. */
661 switch (code)
662 {
663 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
664 case TREE_LIST: return sizeof (struct tree_list);
665
666 case ERROR_MARK:
667 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
668
669 case TREE_VEC:
670 case OMP_CLAUSE: gcc_unreachable ();
671
672 case SSA_NAME: return sizeof (struct tree_ssa_name);
673
674 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
675 case BLOCK: return sizeof (struct tree_block);
676 case CONSTRUCTOR: return sizeof (struct tree_constructor);
677 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
678 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
679
680 default:
681 return lang_hooks.tree_size (code);
682 }
683
684 default:
685 gcc_unreachable ();
686 }
687 }
688
689 /* Compute the number of bytes occupied by NODE. This routine only
690 looks at TREE_CODE, except for those nodes that have variable sizes. */
691 size_t
692 tree_size (const_tree node)
693 {
694 const enum tree_code code = TREE_CODE (node);
695 switch (code)
696 {
697 case INTEGER_CST:
698 return (sizeof (struct tree_int_cst)
699 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
700
701 case TREE_BINFO:
702 return (offsetof (struct tree_binfo, base_binfos)
703 + vec<tree, va_gc>
704 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
705
706 case TREE_VEC:
707 return (sizeof (struct tree_vec)
708 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
709
710 case VECTOR_CST:
711 return (sizeof (struct tree_vector)
712 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
713
714 case STRING_CST:
715 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
716
717 case OMP_CLAUSE:
718 return (sizeof (struct tree_omp_clause)
719 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
720 * sizeof (tree));
721
722 default:
723 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
724 return (sizeof (struct tree_exp)
725 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
726 else
727 return tree_code_size (code);
728 }
729 }
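
/* Illustrative sketch (an assumption, not part of GCC): for fixed-size
   codes tree_code_size and tree_size agree, but variable-sized nodes such
   as TREE_VEC must be measured through tree_size on a concrete node.  */

static size_t ATTRIBUTE_UNUSED
example_tree_vec_size (void)
{
  tree v = make_tree_vec (4);
  /* struct tree_vec already embeds one element, hence the "len - 1"
     term used above.  */
  return tree_size (v);
}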
730
731 /* Record interesting allocation statistics for a tree node with CODE
732 and LENGTH. */
733
734 static void
735 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
736 size_t length ATTRIBUTE_UNUSED)
737 {
738 enum tree_code_class type = TREE_CODE_CLASS (code);
739 tree_node_kind kind;
740
741 if (!GATHER_STATISTICS)
742 return;
743
744 switch (type)
745 {
746 case tcc_declaration: /* A decl node */
747 kind = d_kind;
748 break;
749
750 case tcc_type: /* a type node */
751 kind = t_kind;
752 break;
753
754 case tcc_statement: /* an expression with side effects */
755 kind = s_kind;
756 break;
757
758 case tcc_reference: /* a reference */
759 kind = r_kind;
760 break;
761
762 case tcc_expression: /* an expression */
763 case tcc_comparison: /* a comparison expression */
764 case tcc_unary: /* a unary arithmetic expression */
765 case tcc_binary: /* a binary arithmetic expression */
766 kind = e_kind;
767 break;
768
769 case tcc_constant: /* a constant */
770 kind = c_kind;
771 break;
772
773 case tcc_exceptional: /* something random, like an identifier. */
774 switch (code)
775 {
776 case IDENTIFIER_NODE:
777 kind = id_kind;
778 break;
779
780 case TREE_VEC:
781 kind = vec_kind;
782 break;
783
784 case TREE_BINFO:
785 kind = binfo_kind;
786 break;
787
788 case SSA_NAME:
789 kind = ssa_name_kind;
790 break;
791
792 case BLOCK:
793 kind = b_kind;
794 break;
795
796 case CONSTRUCTOR:
797 kind = constr_kind;
798 break;
799
800 case OMP_CLAUSE:
801 kind = omp_clause_kind;
802 break;
803
804 default:
805 kind = x_kind;
806 break;
807 }
808 break;
809
810 case tcc_vl_exp:
811 kind = e_kind;
812 break;
813
814 default:
815 gcc_unreachable ();
816 }
817
818 tree_code_counts[(int) code]++;
819 tree_node_counts[(int) kind]++;
820 tree_node_sizes[(int) kind] += length;
821 }
822
823 /* Allocate and return a new UID from the DECL_UID namespace. */
824
825 int
826 allocate_decl_uid (void)
827 {
828 return next_decl_uid++;
829 }
830
831 /* Return a newly allocated node of code CODE. For decl and type
832 nodes, some other fields are initialized. The rest of the node is
833 initialized to zero. This function cannot be used for TREE_VEC,
834 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
835 tree_code_size.
836
837 Achoo! I got a code in the node. */
838
839 tree
840 make_node_stat (enum tree_code code MEM_STAT_DECL)
841 {
842 tree t;
843 enum tree_code_class type = TREE_CODE_CLASS (code);
844 size_t length = tree_code_size (code);
845
846 record_node_allocation_statistics (code, length);
847
848 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
849 TREE_SET_CODE (t, code);
850
851 switch (type)
852 {
853 case tcc_statement:
854 TREE_SIDE_EFFECTS (t) = 1;
855 break;
856
857 case tcc_declaration:
858 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
859 {
860 if (code == FUNCTION_DECL)
861 {
862 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
863 DECL_MODE (t) = FUNCTION_MODE;
864 }
865 else
866 DECL_ALIGN (t) = 1;
867 }
868 DECL_SOURCE_LOCATION (t) = input_location;
869 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
870 DECL_UID (t) = --next_debug_decl_uid;
871 else
872 {
873 DECL_UID (t) = allocate_decl_uid ();
874 SET_DECL_PT_UID (t, -1);
875 }
876 if (TREE_CODE (t) == LABEL_DECL)
877 LABEL_DECL_UID (t) = -1;
878
879 break;
880
881 case tcc_type:
882 TYPE_UID (t) = next_type_uid++;
883 TYPE_ALIGN (t) = BITS_PER_UNIT;
884 TYPE_USER_ALIGN (t) = 0;
885 TYPE_MAIN_VARIANT (t) = t;
886 TYPE_CANONICAL (t) = t;
887
888 /* Default to no attributes for type, but let target change that. */
889 TYPE_ATTRIBUTES (t) = NULL_TREE;
890 targetm.set_default_type_attributes (t);
891
892 /* We have not yet computed the alias set for this type. */
893 TYPE_ALIAS_SET (t) = -1;
894 break;
895
896 case tcc_constant:
897 TREE_CONSTANT (t) = 1;
898 break;
899
900 case tcc_expression:
901 switch (code)
902 {
903 case INIT_EXPR:
904 case MODIFY_EXPR:
905 case VA_ARG_EXPR:
906 case PREDECREMENT_EXPR:
907 case PREINCREMENT_EXPR:
908 case POSTDECREMENT_EXPR:
909 case POSTINCREMENT_EXPR:
910 /* All of these have side-effects, no matter what their
911 operands are. */
912 TREE_SIDE_EFFECTS (t) = 1;
913 break;
914
915 default:
916 break;
917 }
918 break;
919
920 default:
921 /* Other classes need no special treatment. */
922 break;
923 }
924
925 return t;
926 }
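
/* Illustrative sketch (an assumption, not part of GCC): a minimal use of
   make_node for a type node.  The tcc_type case above assigns the UID,
   default alignment and main variant; a real caller would also set the
   precision, bounds and mode and lay the type out.  */

static tree ATTRIBUTE_UNUSED
example_make_type_node (void)
{
  return make_node (INTEGER_TYPE);
}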
927 \f
928 /* Return a new node with the same contents as NODE except that its
929 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
930
931 tree
932 copy_node_stat (tree node MEM_STAT_DECL)
933 {
934 tree t;
935 enum tree_code code = TREE_CODE (node);
936 size_t length;
937
938 gcc_assert (code != STATEMENT_LIST);
939
940 length = tree_size (node);
941 record_node_allocation_statistics (code, length);
942 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
943 memcpy (t, node, length);
944
945 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
946 TREE_CHAIN (t) = 0;
947 TREE_ASM_WRITTEN (t) = 0;
948 TREE_VISITED (t) = 0;
949
950 if (TREE_CODE_CLASS (code) == tcc_declaration)
951 {
952 if (code == DEBUG_EXPR_DECL)
953 DECL_UID (t) = --next_debug_decl_uid;
954 else
955 {
956 DECL_UID (t) = allocate_decl_uid ();
957 if (DECL_PT_UID_SET_P (node))
958 SET_DECL_PT_UID (t, DECL_PT_UID (node));
959 }
960 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
961 && DECL_HAS_VALUE_EXPR_P (node))
962 {
963 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
964 DECL_HAS_VALUE_EXPR_P (t) = 1;
965 }
 966 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
967 if (TREE_CODE (node) == VAR_DECL)
968 DECL_HAS_DEBUG_EXPR_P (t) = 0;
969 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
970 {
971 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
972 DECL_HAS_INIT_PRIORITY_P (t) = 1;
973 }
974 if (TREE_CODE (node) == FUNCTION_DECL)
975 DECL_STRUCT_FUNCTION (t) = NULL;
976 }
977 else if (TREE_CODE_CLASS (code) == tcc_type)
978 {
979 TYPE_UID (t) = next_type_uid++;
980 /* The following is so that the debug code for
981 the copy is different from the original type.
982 The two statements usually duplicate each other
983 (because they clear fields of the same union),
984 but the optimizer should catch that. */
985 TYPE_SYMTAB_POINTER (t) = 0;
986 TYPE_SYMTAB_ADDRESS (t) = 0;
987
988 /* Do not copy the values cache. */
989 if (TYPE_CACHED_VALUES_P (t))
990 {
991 TYPE_CACHED_VALUES_P (t) = 0;
992 TYPE_CACHED_VALUES (t) = NULL_TREE;
993 }
994 }
995
996 return t;
997 }
998
999 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1000 For example, this can copy a list made of TREE_LIST nodes. */
1001
1002 tree
1003 copy_list (tree list)
1004 {
1005 tree head;
1006 tree prev, next;
1007
1008 if (list == 0)
1009 return 0;
1010
1011 head = prev = copy_node (list);
1012 next = TREE_CHAIN (list);
1013 while (next)
1014 {
1015 TREE_CHAIN (prev) = copy_node (next);
1016 prev = TREE_CHAIN (prev);
1017 next = TREE_CHAIN (next);
1018 }
1019 return head;
1020 }
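
/* Illustrative sketch (an assumption, not part of GCC): copy_list
   duplicates the TREE_LIST spine but shares the TREE_PURPOSE and
   TREE_VALUE fields; tree_cons is the usual list constructor from
   tree.h.  */

static tree ATTRIBUTE_UNUSED
example_copy_list (void)
{
  tree orig = tree_cons (NULL_TREE, integer_zero_node,
                         tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
  /* The copy may be re-chained freely without disturbing ORIG.  */
  return copy_list (orig);
}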
1021
1022 \f
1023 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1024 INTEGER_CST with value CST and type TYPE. */
1025
1026 static unsigned int
1027 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1028 {
1029 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1030 /* We need an extra zero HWI if CST is an unsigned integer with its
1031 upper bit set, and if CST occupies a whole number of HWIs. */
1032 if (TYPE_UNSIGNED (type)
1033 && wi::neg_p (cst)
1034 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1035 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1036 return cst.get_len ();
1037 }
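
/* Worked example (an assumption about a host with 64-bit HOST_WIDE_INT):
   for a 64-bit unsigned type holding the value 1 << 63, the upper bit is
   set and the precision is a whole number of HWIs, so an extra zero
   element is needed and this function returns 2 even though the wide_int
   length is 1.  */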
1038
1039 /* Return a new INTEGER_CST with value CST and type TYPE. */
1040
1041 static tree
1042 build_new_int_cst (tree type, const wide_int &cst)
1043 {
1044 unsigned int len = cst.get_len ();
1045 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1046 tree nt = make_int_cst (len, ext_len);
1047
1048 if (len < ext_len)
1049 {
1050 --ext_len;
1051 TREE_INT_CST_ELT (nt, ext_len) = 0;
1052 for (unsigned int i = len; i < ext_len; ++i)
1053 TREE_INT_CST_ELT (nt, i) = -1;
1054 }
1055 else if (TYPE_UNSIGNED (type)
1056 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1057 {
1058 len--;
1059 TREE_INT_CST_ELT (nt, len)
1060 = zext_hwi (cst.elt (len),
1061 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1062 }
1063
1064 for (unsigned int i = 0; i < len; i++)
1065 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1066 TREE_TYPE (nt) = type;
1067 return nt;
1068 }
1069
1070 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1071
1072 tree
1073 build_int_cst (tree type, HOST_WIDE_INT low)
1074 {
1075 /* Support legacy code. */
1076 if (!type)
1077 type = integer_type_node;
1078
1079 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1080 }
1081
1082 tree
1083 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1084 {
1085 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1086 }
1087
1088 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1089
1090 tree
1091 build_int_cst_type (tree type, HOST_WIDE_INT low)
1092 {
1093 gcc_assert (type);
1094 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1095 }
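
/* Illustrative sketch (an assumption, not part of GCC): the common way to
   obtain a small shared integer constant.  */

static tree ATTRIBUTE_UNUSED
example_build_int_cst (void)
{
  /* Sign-extends 10 to the precision of 'int' and returns the shared
     INTEGER_CST from the per-type small-value cache.  */
  return build_int_cst (integer_type_node, 10);
}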
1096
 1097 /* Construct a tree of type TYPE with the value given by CST. The signedness
 1098 of CST is assumed to be the same as the signedness of TYPE. */
1099
1100 tree
1101 double_int_to_tree (tree type, double_int cst)
1102 {
1103 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1104 }
1105
 1106 /* Force the wide_int CST into the range of the type TYPE by sign-
 1107 or zero-extending it. OVERFLOWABLE indicates whether we are
 1108 interested in overflow of the value: when >0 we only care about
 1109 signed overflow, and when <0 we care about any overflow.
 1110 OVERFLOWED indicates whether overflow has already occurred.
 1111 We force the value to be within the range of TYPE (by setting to
 1112 0 or 1 all the bits outside the type's range). We set
 1113 TREE_OVERFLOW if
 1114 OVERFLOWED is nonzero,
 1115 or OVERFLOWABLE is >0 and signed overflow occurs,
 1116 or OVERFLOWABLE is <0 and any overflow occurs.
 1117 We return a new tree node for the extended wide_int. The node
 1118 is shared if no overflow flags are set. */
1119
1120
1121 tree
1122 force_fit_type (tree type, const wide_int_ref &cst,
1123 int overflowable, bool overflowed)
1124 {
1125 signop sign = TYPE_SIGN (type);
1126
1127 /* If we need to set overflow flags, return a new unshared node. */
1128 if (overflowed || !wi::fits_to_tree_p (cst, type))
1129 {
1130 if (overflowed
1131 || overflowable < 0
1132 || (overflowable > 0 && sign == SIGNED))
1133 {
1134 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1135 tree t = build_new_int_cst (type, tmp);
1136 TREE_OVERFLOW (t) = 1;
1137 return t;
1138 }
1139 }
1140
1141 /* Else build a shared node. */
1142 return wide_int_to_tree (type, cst);
1143 }
1144
1145 /* These are the hash table functions for the hash table of INTEGER_CST
1146 nodes of a sizetype. */
1147
 1148 /* Return the hash code of X, an INTEGER_CST. */
1149
1150 static hashval_t
1151 int_cst_hash_hash (const void *x)
1152 {
1153 const_tree const t = (const_tree) x;
1154 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1155 int i;
1156
1157 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1158 code ^= TREE_INT_CST_ELT (t, i);
1159
1160 return code;
1161 }
1162
 1163 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
 1164 is the same as that given by *Y, also an INTEGER_CST tree node. */
1165
1166 static int
1167 int_cst_hash_eq (const void *x, const void *y)
1168 {
1169 const_tree const xt = (const_tree) x;
1170 const_tree const yt = (const_tree) y;
1171
1172 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1173 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1174 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1175 return false;
1176
1177 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1178 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1179 return false;
1180
1181 return true;
1182 }
1183
1184 /* Create an INT_CST node of TYPE and value CST.
1185 The returned node is always shared. For small integers we use a
1186 per-type vector cache, for larger ones we use a single hash table.
1187 The value is extended from its precision according to the sign of
1188 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1189 the upper bits and ensures that hashing and value equality based
1190 upon the underlying HOST_WIDE_INTs works without masking. */
1191
1192 tree
1193 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1194 {
1195 tree t;
1196 int ix = -1;
1197 int limit = 0;
1198
1199 gcc_assert (type);
1200 unsigned int prec = TYPE_PRECISION (type);
1201 signop sgn = TYPE_SIGN (type);
1202
1203 /* Verify that everything is canonical. */
1204 int l = pcst.get_len ();
1205 if (l > 1)
1206 {
1207 if (pcst.elt (l - 1) == 0)
1208 gcc_assert (pcst.elt (l - 2) < 0);
1209 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1210 gcc_assert (pcst.elt (l - 2) >= 0);
1211 }
1212
1213 wide_int cst = wide_int::from (pcst, prec, sgn);
1214 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1215
1216 switch (TREE_CODE (type))
1217 {
1218 case NULLPTR_TYPE:
1219 gcc_assert (cst == 0);
1220 /* Fallthru. */
1221
1222 case POINTER_TYPE:
1223 case REFERENCE_TYPE:
1224 case POINTER_BOUNDS_TYPE:
1225 /* Cache NULL pointer and zero bounds. */
1226 if (cst == 0)
1227 {
1228 limit = 1;
1229 ix = 0;
1230 }
1231 break;
1232
1233 case BOOLEAN_TYPE:
1234 /* Cache false or true. */
1235 limit = 2;
1236 if (wi::leu_p (cst, 1))
1237 ix = cst.to_uhwi ();
1238 break;
1239
1240 case INTEGER_TYPE:
1241 case OFFSET_TYPE:
1242 if (TYPE_SIGN (type) == UNSIGNED)
1243 {
1244 /* Cache 0..N */
1245 limit = INTEGER_SHARE_LIMIT;
1246
 1247 /* This is a little hokey, but if the prec is smaller than
1248 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1249 obvious test will not get the correct answer. */
1250 if (prec < HOST_BITS_PER_WIDE_INT)
1251 {
1252 if (cst.to_uhwi () < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1253 ix = cst.to_uhwi ();
1254 }
1255 else if (wi::ltu_p (cst, INTEGER_SHARE_LIMIT))
1256 ix = cst.to_uhwi ();
1257 }
1258 else
1259 {
1260 /* Cache -1..N */
1261 limit = INTEGER_SHARE_LIMIT + 1;
1262
1263 if (cst == -1)
1264 ix = 0;
1265 else if (!wi::neg_p (cst))
1266 {
1267 if (prec < HOST_BITS_PER_WIDE_INT)
1268 {
1269 if (cst.to_shwi () < INTEGER_SHARE_LIMIT)
1270 ix = cst.to_shwi () + 1;
1271 }
1272 else if (wi::lts_p (cst, INTEGER_SHARE_LIMIT))
1273 ix = cst.to_shwi () + 1;
1274 }
1275 }
1276 break;
1277
1278 case ENUMERAL_TYPE:
1279 break;
1280
1281 default:
1282 gcc_unreachable ();
1283 }
1284
1285 if (ext_len == 1)
1286 {
1287 /* We just need to store a single HOST_WIDE_INT. */
1288 HOST_WIDE_INT hwi;
1289 if (TYPE_UNSIGNED (type))
1290 hwi = cst.to_uhwi ();
1291 else
1292 hwi = cst.to_shwi ();
1293 if (ix >= 0)
1294 {
1295 /* Look for it in the type's vector of small shared ints. */
1296 if (!TYPE_CACHED_VALUES_P (type))
1297 {
1298 TYPE_CACHED_VALUES_P (type) = 1;
1299 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1300 }
1301
1302 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1303 if (t)
1304 /* Make sure no one is clobbering the shared constant. */
1305 gcc_assert (TREE_TYPE (t) == type
1306 && TREE_INT_CST_NUNITS (t) == 1
1307 && TREE_INT_CST_EXT_NUNITS (t) == 1
1308 && TREE_INT_CST_ELT (t, 0) == hwi);
1309 else
1310 {
1311 /* Create a new shared int. */
1312 t = build_new_int_cst (type, cst);
1313 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1314 }
1315 }
1316 else
1317 {
1318 /* Use the cache of larger shared ints, using int_cst_node as
1319 a temporary. */
1320 void **slot;
1321
1322 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1323 TREE_TYPE (int_cst_node) = type;
1324
1325 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1326 t = (tree) *slot;
1327 if (!t)
1328 {
1329 /* Insert this one into the hash table. */
1330 t = int_cst_node;
1331 *slot = t;
1332 /* Make a new node for next time round. */
1333 int_cst_node = make_int_cst (1, 1);
1334 }
1335 }
1336 }
1337 else
1338 {
1339 /* The value either hashes properly or we drop it on the floor
1340 for the gc to take care of. There will not be enough of them
1341 to worry about. */
1342 void **slot;
1343
1344 tree nt = build_new_int_cst (type, cst);
1345 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1346 t = (tree) *slot;
1347 if (!t)
1348 {
1349 /* Insert this one into the hash table. */
1350 t = nt;
1351 *slot = t;
1352 }
1353 }
1354
1355 return t;
1356 }
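
/* Illustrative sketch (an assumption, not part of GCC): callers that
   already hold a wide_int use wide_int_to_tree directly; build_int_cst
   above is just a convenience wrapper that builds the wide_int first.  */

static tree ATTRIBUTE_UNUSED
example_wide_int_to_tree (void)
{
  wide_int w = wi::shwi (42, TYPE_PRECISION (integer_type_node));
  return wide_int_to_tree (integer_type_node, w);
}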
1357
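/* Record the INTEGER_CST T in the shared-constant caches of its type,
   either the per-type vector of small values or the hash table of larger
   ones, so that later wide_int_to_tree calls return T itself.  */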
1358 void
1359 cache_integer_cst (tree t)
1360 {
1361 tree type = TREE_TYPE (t);
1362 int ix = -1;
1363 int limit = 0;
1364 int prec = TYPE_PRECISION (type);
1365
1366 gcc_assert (!TREE_OVERFLOW (t));
1367
1368 switch (TREE_CODE (type))
1369 {
1370 case NULLPTR_TYPE:
1371 gcc_assert (integer_zerop (t));
1372 /* Fallthru. */
1373
1374 case POINTER_TYPE:
1375 case REFERENCE_TYPE:
1376 /* Cache NULL pointer. */
1377 if (integer_zerop (t))
1378 {
1379 limit = 1;
1380 ix = 0;
1381 }
1382 break;
1383
1384 case BOOLEAN_TYPE:
1385 /* Cache false or true. */
1386 limit = 2;
1387 if (wi::ltu_p (t, 2))
1388 ix = TREE_INT_CST_ELT (t, 0);
1389 break;
1390
1391 case INTEGER_TYPE:
1392 case OFFSET_TYPE:
1393 if (TYPE_UNSIGNED (type))
1394 {
1395 /* Cache 0..N */
1396 limit = INTEGER_SHARE_LIMIT;
1397
 1398 /* This is a little hokey, but if the prec is smaller than
1399 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1400 obvious test will not get the correct answer. */
1401 if (prec < HOST_BITS_PER_WIDE_INT)
1402 {
1403 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1404 ix = tree_to_uhwi (t);
1405 }
1406 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1407 ix = tree_to_uhwi (t);
1408 }
1409 else
1410 {
1411 /* Cache -1..N */
1412 limit = INTEGER_SHARE_LIMIT + 1;
1413
1414 if (integer_minus_onep (t))
1415 ix = 0;
1416 else if (!wi::neg_p (t))
1417 {
1418 if (prec < HOST_BITS_PER_WIDE_INT)
1419 {
1420 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1421 ix = tree_to_shwi (t) + 1;
1422 }
1423 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1424 ix = tree_to_shwi (t) + 1;
1425 }
1426 }
1427 break;
1428
1429 case ENUMERAL_TYPE:
1430 break;
1431
1432 default:
1433 gcc_unreachable ();
1434 }
1435
1436 if (ix >= 0)
1437 {
1438 /* Look for it in the type's vector of small shared ints. */
1439 if (!TYPE_CACHED_VALUES_P (type))
1440 {
1441 TYPE_CACHED_VALUES_P (type) = 1;
1442 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1443 }
1444
1445 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1446 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1447 }
1448 else
1449 {
1450 /* Use the cache of larger shared ints. */
1451 void **slot;
1452
1453 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
 1454 /* If there is already an entry for the number, verify it's the
 1455 same. */
1456 if (*slot)
1457 gcc_assert (wi::eq_p (tree (*slot), t));
1458 else
1459 /* Otherwise insert this one into the hash table. */
1460 *slot = t;
1461 }
1462 }
1463
1464
 1465 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
 1466 and the rest are zeros. */
1467
1468 tree
1469 build_low_bits_mask (tree type, unsigned bits)
1470 {
1471 gcc_assert (bits <= TYPE_PRECISION (type));
1472
1473 return wide_int_to_tree (type, wi::mask (bits, false,
1474 TYPE_PRECISION (type)));
1475 }
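
/* Worked example (illustrative): build_low_bits_mask (integer_type_node, 3)
   yields the INTEGER_CST 7, i.e. binary ...000111.  */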
1476
 1477 /* Build a newly constructed VECTOR_CST node with space for LEN elements. */
1478
1479 tree
1480 make_vector_stat (unsigned len MEM_STAT_DECL)
1481 {
1482 tree t;
1483 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1484
1485 record_node_allocation_statistics (VECTOR_CST, length);
1486
1487 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1488
1489 TREE_SET_CODE (t, VECTOR_CST);
1490 TREE_CONSTANT (t) = 1;
1491
1492 return t;
1493 }
1494
 1495 /* Return a new VECTOR_CST node whose type is TYPE and whose values
 1496 are given by the array VALS. */
1497
1498 tree
1499 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1500 {
1501 int over = 0;
1502 unsigned cnt = 0;
1503 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1504 TREE_TYPE (v) = type;
1505
1506 /* Iterate through elements and check for overflow. */
1507 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1508 {
1509 tree value = vals[cnt];
1510
1511 VECTOR_CST_ELT (v, cnt) = value;
1512
1513 /* Don't crash if we get an address constant. */
1514 if (!CONSTANT_CLASS_P (value))
1515 continue;
1516
1517 over |= TREE_OVERFLOW (value);
1518 }
1519
1520 TREE_OVERFLOW (v) = over;
1521 return v;
1522 }
1523
1524 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1525 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1526
1527 tree
1528 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1529 {
1530 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1531 unsigned HOST_WIDE_INT idx;
1532 tree value;
1533
1534 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1535 vec[idx] = value;
1536 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1537 vec[idx] = build_zero_cst (TREE_TYPE (type));
1538
1539 return build_vector (type, vec);
1540 }
1541
1542 /* Build a vector of type VECTYPE where all the elements are SCs. */
1543 tree
1544 build_vector_from_val (tree vectype, tree sc)
1545 {
1546 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1547
1548 if (sc == error_mark_node)
1549 return sc;
1550
1551 /* Verify that the vector type is suitable for SC. Note that there
1552 is some inconsistency in the type-system with respect to restrict
1553 qualifications of pointers. Vector types always have a main-variant
1554 element type and the qualification is applied to the vector-type.
1555 So TREE_TYPE (vector-type) does not return a properly qualified
1556 vector element-type. */
1557 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1558 TREE_TYPE (vectype)));
1559
1560 if (CONSTANT_CLASS_P (sc))
1561 {
1562 tree *v = XALLOCAVEC (tree, nunits);
1563 for (i = 0; i < nunits; ++i)
1564 v[i] = sc;
1565 return build_vector (vectype, v);
1566 }
1567 else
1568 {
1569 vec<constructor_elt, va_gc> *v;
1570 vec_alloc (v, nunits);
1571 for (i = 0; i < nunits; ++i)
1572 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1573 return build_constructor (vectype, v);
1574 }
1575 }
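
/* Illustrative sketch (an assumption, not part of GCC; build_vector_type
   is the usual vector-type constructor from tree.h): splatting a scalar
   constant across a four-element integer vector yields a VECTOR_CST.  */

static tree ATTRIBUTE_UNUSED
example_vector_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  return build_vector_from_val (v4si, build_int_cst (integer_type_node, 7));
}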
1576
1577 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1578 are in the vec pointed to by VALS. */
1579 tree
1580 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1581 {
1582 tree c = make_node (CONSTRUCTOR);
1583 unsigned int i;
1584 constructor_elt *elt;
1585 bool constant_p = true;
1586 bool side_effects_p = false;
1587
1588 TREE_TYPE (c) = type;
1589 CONSTRUCTOR_ELTS (c) = vals;
1590
1591 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1592 {
1593 /* Mostly ctors will have elts that don't have side-effects, so
1594 the usual case is to scan all the elements. Hence a single
1595 loop for both const and side effects, rather than one loop
1596 each (with early outs). */
1597 if (!TREE_CONSTANT (elt->value))
1598 constant_p = false;
1599 if (TREE_SIDE_EFFECTS (elt->value))
1600 side_effects_p = true;
1601 }
1602
1603 TREE_SIDE_EFFECTS (c) = side_effects_p;
1604 TREE_CONSTANT (c) = constant_p;
1605
1606 return c;
1607 }
1608
1609 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1610 INDEX and VALUE. */
1611 tree
1612 build_constructor_single (tree type, tree index, tree value)
1613 {
1614 vec<constructor_elt, va_gc> *v;
1615 constructor_elt elt = {index, value};
1616
1617 vec_alloc (v, 1);
1618 v->quick_push (elt);
1619
1620 return build_constructor (type, v);
1621 }
1622
1623
1624 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1625 are in a list pointed to by VALS. */
1626 tree
1627 build_constructor_from_list (tree type, tree vals)
1628 {
1629 tree t;
1630 vec<constructor_elt, va_gc> *v = NULL;
1631
1632 if (vals)
1633 {
1634 vec_alloc (v, list_length (vals));
1635 for (t = vals; t; t = TREE_CHAIN (t))
1636 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1637 }
1638
1639 return build_constructor (type, v);
1640 }
1641
1642 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1643 of elements, provided as index/value pairs. */
1644
1645 tree
1646 build_constructor_va (tree type, int nelts, ...)
1647 {
1648 vec<constructor_elt, va_gc> *v = NULL;
1649 va_list p;
1650
1651 va_start (p, nelts);
1652 vec_alloc (v, nelts);
1653 while (nelts--)
1654 {
1655 tree index = va_arg (p, tree);
1656 tree value = va_arg (p, tree);
1657 CONSTRUCTOR_APPEND_ELT (v, index, value);
1658 }
1659 va_end (p);
1660 return build_constructor (type, v);
1661 }
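
/* Illustrative sketch (an assumption, not part of GCC): building the
   initializer {[0] = 1, [1] = 2} for ARRAY_TYPE with build_constructor_va;
   size_int comes from tree.h.  */

static tree ATTRIBUTE_UNUSED
example_constructor_va (tree array_type)
{
  return build_constructor_va (array_type, 2,
                               size_int (0), integer_one_node,
                               size_int (1),
                               build_int_cst (integer_type_node, 2));
}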
1662
1663 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1664
1665 tree
1666 build_fixed (tree type, FIXED_VALUE_TYPE f)
1667 {
1668 tree v;
1669 FIXED_VALUE_TYPE *fp;
1670
1671 v = make_node (FIXED_CST);
1672 fp = ggc_alloc_fixed_value ();
1673 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1674
1675 TREE_TYPE (v) = type;
1676 TREE_FIXED_CST_PTR (v) = fp;
1677 return v;
1678 }
1679
1680 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1681
1682 tree
1683 build_real (tree type, REAL_VALUE_TYPE d)
1684 {
1685 tree v;
1686 REAL_VALUE_TYPE *dp;
1687 int overflow = 0;
1688
1689 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1690 Consider doing it via real_convert now. */
1691
1692 v = make_node (REAL_CST);
1693 dp = ggc_alloc_real_value ();
1694 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1695
1696 TREE_TYPE (v) = type;
1697 TREE_REAL_CST_PTR (v) = dp;
1698 TREE_OVERFLOW (v) = overflow;
1699 return v;
1700 }
1701
 1702 /* Return the REAL_VALUE_TYPE corresponding to the integer value of the
 1703 INTEGER_CST node I, converted to type TYPE. */
1704
1705 REAL_VALUE_TYPE
1706 real_value_from_int_cst (const_tree type, const_tree i)
1707 {
1708 REAL_VALUE_TYPE d;
1709
1710 /* Clear all bits of the real value type so that we can later do
1711 bitwise comparisons to see if two values are the same. */
1712 memset (&d, 0, sizeof d);
1713
1714 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
1715 wide_int (i), TYPE_SIGN (TREE_TYPE (i)));
1716 return d;
1717 }
1718
1719 /* Given a tree representing an integer constant I, return a tree
1720 representing the same value as a floating-point constant of type TYPE. */
1721
1722 tree
1723 build_real_from_int_cst (tree type, const_tree i)
1724 {
1725 tree v;
1726 int overflow = TREE_OVERFLOW (i);
1727
1728 v = build_real (type, real_value_from_int_cst (type, i));
1729
1730 TREE_OVERFLOW (v) |= overflow;
1731 return v;
1732 }
1733
1734 /* Return a newly constructed STRING_CST node whose value is
1735 the LEN characters at STR.
1736 Note that for a C string literal, LEN should include the trailing NUL.
1737 The TREE_TYPE is not initialized. */
1738
1739 tree
1740 build_string (int len, const char *str)
1741 {
1742 tree s;
1743 size_t length;
1744
1745 /* Do not waste bytes provided by padding of struct tree_string. */
1746 length = len + offsetof (struct tree_string, str) + 1;
1747
1748 record_node_allocation_statistics (STRING_CST, length);
1749
1750 s = ggc_alloc_tree_node (length);
1751
1752 memset (s, 0, sizeof (struct tree_typed));
1753 TREE_SET_CODE (s, STRING_CST);
1754 TREE_CONSTANT (s) = 1;
1755 TREE_STRING_LENGTH (s) = len;
1756 memcpy (s->string.str, str, len);
1757 s->string.str[len] = '\0';
1758
1759 return s;
1760 }
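
/* Illustrative sketch (an assumption, not part of GCC): LEN counts the
   trailing NUL for C string literals, so "hi" is built with length 3.
   TREE_TYPE is deliberately left for the caller, as the comment above
   says.  */

static tree ATTRIBUTE_UNUSED
example_build_string (void)
{
  return build_string (3, "hi");
}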
1761
1762 /* Return a newly constructed COMPLEX_CST node whose value is
1763 specified by the real and imaginary parts REAL and IMAG.
1764 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1765 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1766
1767 tree
1768 build_complex (tree type, tree real, tree imag)
1769 {
1770 tree t = make_node (COMPLEX_CST);
1771
1772 TREE_REALPART (t) = real;
1773 TREE_IMAGPART (t) = imag;
1774 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1775 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1776 return t;
1777 }
1778
1779 /* Return a constant of arithmetic type TYPE which is the
1780 multiplicative identity of the set TYPE. */
1781
1782 tree
1783 build_one_cst (tree type)
1784 {
1785 switch (TREE_CODE (type))
1786 {
1787 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1788 case POINTER_TYPE: case REFERENCE_TYPE:
1789 case OFFSET_TYPE:
1790 return build_int_cst (type, 1);
1791
1792 case REAL_TYPE:
1793 return build_real (type, dconst1);
1794
1795 case FIXED_POINT_TYPE:
1796 /* We can only generate 1 for accum types. */
1797 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1798 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1799
1800 case VECTOR_TYPE:
1801 {
1802 tree scalar = build_one_cst (TREE_TYPE (type));
1803
1804 return build_vector_from_val (type, scalar);
1805 }
1806
1807 case COMPLEX_TYPE:
1808 return build_complex (type,
1809 build_one_cst (TREE_TYPE (type)),
1810 build_zero_cst (TREE_TYPE (type)));
1811
1812 default:
1813 gcc_unreachable ();
1814 }
1815 }
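
/* Illustrative sketch (an assumption, not part of GCC): the identity
   constants compose, so for a complex type this produces 1.0 + 0.0i.  */

static tree ATTRIBUTE_UNUSED
example_complex_one (void)
{
  return build_one_cst (complex_double_type_node);
}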
1816
1817 /* Return an integer of type TYPE containing all 1's in as much precision as
1818 it contains, or a complex or vector whose subparts are such integers. */
1819
1820 tree
1821 build_all_ones_cst (tree type)
1822 {
1823 if (TREE_CODE (type) == COMPLEX_TYPE)
1824 {
1825 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1826 return build_complex (type, scalar, scalar);
1827 }
1828 else
1829 return build_minus_one_cst (type);
1830 }
1831
1832 /* Return a constant of arithmetic type TYPE which is the
1833 opposite of the multiplicative identity of the set TYPE. */
1834
1835 tree
1836 build_minus_one_cst (tree type)
1837 {
1838 switch (TREE_CODE (type))
1839 {
1840 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1841 case POINTER_TYPE: case REFERENCE_TYPE:
1842 case OFFSET_TYPE:
1843 return build_int_cst (type, -1);
1844
1845 case REAL_TYPE:
1846 return build_real (type, dconstm1);
1847
1848 case FIXED_POINT_TYPE:
1849 /* We can only generate 1 for accum types. */
1850 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1851 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1852 TYPE_MODE (type)));
1853
1854 case VECTOR_TYPE:
1855 {
1856 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1857
1858 return build_vector_from_val (type, scalar);
1859 }
1860
1861 case COMPLEX_TYPE:
1862 return build_complex (type,
1863 build_minus_one_cst (TREE_TYPE (type)),
1864 build_zero_cst (TREE_TYPE (type)));
1865
1866 default:
1867 gcc_unreachable ();
1868 }
1869 }
1870
1871 /* Build 0 constant of type TYPE. This is used by constructor folding
1872 and thus the constant should be represented in memory by
1873 zero(es). */
1874
1875 tree
1876 build_zero_cst (tree type)
1877 {
1878 switch (TREE_CODE (type))
1879 {
1880 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1881 case POINTER_TYPE: case REFERENCE_TYPE:
1882 case OFFSET_TYPE: case NULLPTR_TYPE:
1883 return build_int_cst (type, 0);
1884
1885 case REAL_TYPE:
1886 return build_real (type, dconst0);
1887
1888 case FIXED_POINT_TYPE:
1889 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1890
1891 case VECTOR_TYPE:
1892 {
1893 tree scalar = build_zero_cst (TREE_TYPE (type));
1894
1895 return build_vector_from_val (type, scalar);
1896 }
1897
1898 case COMPLEX_TYPE:
1899 {
1900 tree zero = build_zero_cst (TREE_TYPE (type));
1901
1902 return build_complex (type, zero, zero);
1903 }
1904
1905 default:
1906 if (!AGGREGATE_TYPE_P (type))
1907 return fold_convert (type, integer_zero_node);
1908 return build_constructor (type, NULL);
1909 }
1910 }
1911
1912
 1913 /* Build a BINFO with room for BASE_BINFOS base binfos. */
1914
1915 tree
1916 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
1917 {
1918 tree t;
1919 size_t length = (offsetof (struct tree_binfo, base_binfos)
1920 + vec<tree, va_gc>::embedded_size (base_binfos));
1921
1922 record_node_allocation_statistics (TREE_BINFO, length);
1923
1924 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1925
1926 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
1927
1928 TREE_SET_CODE (t, TREE_BINFO);
1929
1930 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
1931
1932 return t;
1933 }
1934
1935 /* Create a CASE_LABEL_EXPR tree node and return it. */
1936
1937 tree
1938 build_case_label (tree low_value, tree high_value, tree label_decl)
1939 {
1940 tree t = make_node (CASE_LABEL_EXPR);
1941
1942 TREE_TYPE (t) = void_type_node;
1943 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
1944
1945 CASE_LOW (t) = low_value;
1946 CASE_HIGH (t) = high_value;
1947 CASE_LABEL (t) = label_decl;
1948 CASE_CHAIN (t) = NULL_TREE;
1949
1950 return t;
1951 }
1952
1953 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
1954 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
1955 The latter determines the length of the HOST_WIDE_INT vector. */
1956
1957 tree
1958 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
1959 {
1960 tree t;
 1961 int length = (ext_len - 1) * sizeof (HOST_WIDE_INT) + sizeof (struct tree_int_cst);
1962
1963 gcc_assert (len);
1964 record_node_allocation_statistics (INTEGER_CST, length);
1965
1966 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1967
1968 TREE_SET_CODE (t, INTEGER_CST);
1969 TREE_INT_CST_NUNITS (t) = len;
1970 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
1971
1972 TREE_CONSTANT (t) = 1;
1973
1974 return t;
1975 }
1976
1977 /* Build a newly constructed TREE_VEC node of length LEN. */
1978
1979 tree
1980 make_tree_vec_stat (int len MEM_STAT_DECL)
1981 {
1982 tree t;
1983 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1984
1985 record_node_allocation_statistics (TREE_VEC, length);
1986
1987 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1988
1989 TREE_SET_CODE (t, TREE_VEC);
1990 TREE_VEC_LENGTH (t) = len;
1991
1992 return t;
1993 }
1994
1995 /* Grow a TREE_VEC node to new length LEN. */
1996
1997 tree
1998 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
1999 {
2000 gcc_assert (TREE_CODE (v) == TREE_VEC);
2001
2002 int oldlen = TREE_VEC_LENGTH (v);
2003 gcc_assert (len > oldlen);
2004
2005 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2006 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2007
2008 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2009
2010 v = (tree) ggc_realloc_stat (v, length PASS_MEM_STAT);
2011
2012 TREE_VEC_LENGTH (v) = len;
2013
2014 return v;
2015 }
2016 \f
2017 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2018 constant all of whose elements are zero. */
2019
2020 int
2021 integer_zerop (const_tree expr)
2022 {
2023 STRIP_NOPS (expr);
2024
2025 switch (TREE_CODE (expr))
2026 {
2027 case INTEGER_CST:
2028 return wi::eq_p (expr, 0);
2029 case COMPLEX_CST:
2030 return (integer_zerop (TREE_REALPART (expr))
2031 && integer_zerop (TREE_IMAGPART (expr)));
2032 case VECTOR_CST:
2033 {
2034 unsigned i;
2035 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2036 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2037 return false;
2038 return true;
2039 }
2040 default:
2041 return false;
2042 }
2043 }
2044
2045 /* Return 1 if EXPR is the integer constant one, the corresponding
2046 complex constant, or a vector constant all of whose elements are one. */
2047
2048 int
2049 integer_onep (const_tree expr)
2050 {
2051 STRIP_NOPS (expr);
2052
2053 switch (TREE_CODE (expr))
2054 {
2055 case INTEGER_CST:
2056 return wi::eq_p (wi::to_widest (expr), 1);
2057 case COMPLEX_CST:
2058 return (integer_onep (TREE_REALPART (expr))
2059 && integer_zerop (TREE_IMAGPART (expr)));
2060 case VECTOR_CST:
2061 {
2062 unsigned i;
2063 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2064 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2065 return false;
2066 return true;
2067 }
2068 default:
2069 return false;
2070 }
2071 }
2072
2073 /* Return 1 if EXPR is an integer constant all of whose bits are set within
2074 its precision, or a complex or vector constant whose subparts are all such. */
2075
2076 int
2077 integer_all_onesp (const_tree expr)
2078 {
2079 STRIP_NOPS (expr);
2080
2081 if (TREE_CODE (expr) == COMPLEX_CST
2082 && integer_all_onesp (TREE_REALPART (expr))
2083 && integer_all_onesp (TREE_IMAGPART (expr)))
2084 return 1;
2085
2086 else if (TREE_CODE (expr) == VECTOR_CST)
2087 {
2088 unsigned i;
2089 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2090 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2091 return 0;
2092 return 1;
2093 }
2094
2095 else if (TREE_CODE (expr) != INTEGER_CST)
2096 return 0;
2097
2098 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2099 }
2100
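/* Illustrative sketch for the predicate above: the constant 255 has every
   bit of an 8-bit unsigned type set, so

       integer_all_onesp (build_int_cst (unsigned_char_type_node, 255))

   returns 1, while the same value in a 32-bit type does not.  Like the
   other predicates here, the check first looks through nop conversions via
   STRIP_NOPS.  */
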
2101 /* Return 1 if EXPR is the integer constant minus one, or its complex counterpart. */
2102
2103 int
2104 integer_minus_onep (const_tree expr)
2105 {
2106 STRIP_NOPS (expr);
2107
2108 if (TREE_CODE (expr) == COMPLEX_CST)
2109 return (integer_all_onesp (TREE_REALPART (expr))
2110 && integer_zerop (TREE_IMAGPART (expr)));
2111 else
2112 return integer_all_onesp (expr);
2113 }
2114
2115 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2116 one bit on). */
2117
2118 int
2119 integer_pow2p (const_tree expr)
2120 {
2121 STRIP_NOPS (expr);
2122
2123 if (TREE_CODE (expr) == COMPLEX_CST
2124 && integer_pow2p (TREE_REALPART (expr))
2125 && integer_zerop (TREE_IMAGPART (expr)))
2126 return 1;
2127
2128 if (TREE_CODE (expr) != INTEGER_CST)
2129 return 0;
2130
2131 return wi::popcount (expr) == 1;
2132 }
2133
2134 /* Return 1 if EXPR is an integer constant other than zero or a
2135 complex constant other than zero. */
2136
2137 int
2138 integer_nonzerop (const_tree expr)
2139 {
2140 STRIP_NOPS (expr);
2141
2142 return ((TREE_CODE (expr) == INTEGER_CST
2143 && !wi::eq_p (expr, 0))
2144 || (TREE_CODE (expr) == COMPLEX_CST
2145 && (integer_nonzerop (TREE_REALPART (expr))
2146 || integer_nonzerop (TREE_IMAGPART (expr)))));
2147 }
2148
2149 /* Return 1 if EXPR is the fixed-point constant zero. */
2150
2151 int
2152 fixed_zerop (const_tree expr)
2153 {
2154 return (TREE_CODE (expr) == FIXED_CST
2155 && TREE_FIXED_CST (expr).data.is_zero ());
2156 }
2157
2158 /* Return the base-2 logarithm of a tree node known to be a power of two,
2159 i.e. the Y such that EXPR equals 2**Y. */
2160
2161 int
2162 tree_log2 (const_tree expr)
2163 {
2164 STRIP_NOPS (expr);
2165
2166 if (TREE_CODE (expr) == COMPLEX_CST)
2167 return tree_log2 (TREE_REALPART (expr));
2168
2169 return wi::exact_log2 (expr);
2170 }
2171
2172 /* Similar, but return the largest integer Y such that 2 ** Y is less
2173 than or equal to EXPR. */
2174
2175 int
2176 tree_floor_log2 (const_tree expr)
2177 {
2178 STRIP_NOPS (expr);
2179
2180 if (TREE_CODE (expr) == COMPLEX_CST)
2181 return tree_log2 (TREE_REALPART (expr));
2182
2183 return wi::floor_log2 (expr);
2184 }
2185
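/* Illustrative sketch for the two functions above:

       tree_log2       (build_int_cst (integer_type_node, 8))    is 3
       tree_floor_log2 (build_int_cst (integer_type_node, 10))   is 3

   For a value that is not a power of two, tree_log2 inherits the negative
   result of wi::exact_log2, so callers either check for that or already
   know the operand is a power of two.  */
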
2186 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2187 EXPR is known to be zero, the precision of its type. */
2188
2189 unsigned int
2190 tree_ctz (const_tree expr)
2191 {
2192 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2193 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2194 return 0;
2195
2196 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2197 switch (TREE_CODE (expr))
2198 {
2199 case INTEGER_CST:
2200 ret1 = wi::ctz (expr);
2201 return MIN (ret1, prec);
2202 case SSA_NAME:
2203 ret1 = wi::ctz (get_nonzero_bits (expr));
2204 return MIN (ret1, prec);
2205 case PLUS_EXPR:
2206 case MINUS_EXPR:
2207 case BIT_IOR_EXPR:
2208 case BIT_XOR_EXPR:
2209 case MIN_EXPR:
2210 case MAX_EXPR:
2211 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2212 if (ret1 == 0)
2213 return ret1;
2214 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2215 return MIN (ret1, ret2);
2216 case POINTER_PLUS_EXPR:
2217 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2218 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2219 /* The second operand is sizetype, which could in theory be
2220 wider than the pointer's precision.  Make sure we never
2221 return more than prec. */
2222 ret2 = MIN (ret2, prec);
2223 return MIN (ret1, ret2);
2224 case BIT_AND_EXPR:
2225 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2226 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2227 return MAX (ret1, ret2);
2228 case MULT_EXPR:
2229 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2230 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2231 return MIN (ret1 + ret2, prec);
2232 case LSHIFT_EXPR:
2233 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2234 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2235 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2236 {
2237 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2238 return MIN (ret1 + ret2, prec);
2239 }
2240 return ret1;
2241 case RSHIFT_EXPR:
2242 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2243 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2244 {
2245 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2246 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2247 if (ret1 > ret2)
2248 return ret1 - ret2;
2249 }
2250 return 0;
2251 case TRUNC_DIV_EXPR:
2252 case CEIL_DIV_EXPR:
2253 case FLOOR_DIV_EXPR:
2254 case ROUND_DIV_EXPR:
2255 case EXACT_DIV_EXPR:
2256 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2257 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2258 {
2259 int l = tree_log2 (TREE_OPERAND (expr, 1));
2260 if (l >= 0)
2261 {
2262 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2263 ret2 = l;
2264 if (ret1 > ret2)
2265 return ret1 - ret2;
2266 }
2267 }
2268 return 0;
2269 CASE_CONVERT:
2270 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2271 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2272 ret1 = prec;
2273 return MIN (ret1, prec);
2274 case SAVE_EXPR:
2275 return tree_ctz (TREE_OPERAND (expr, 0));
2276 case COND_EXPR:
2277 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2278 if (ret1 == 0)
2279 return 0;
2280 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2281 return MIN (ret1, ret2);
2282 case COMPOUND_EXPR:
2283 return tree_ctz (TREE_OPERAND (expr, 1));
2284 case ADDR_EXPR:
2285 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2286 if (ret1 > BITS_PER_UNIT)
2287 {
2288 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2289 return MIN (ret1, prec);
2290 }
2291 return 0;
2292 default:
2293 return 0;
2294 }
2295 }
2296
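/* Illustrative sketch of how the recursion above propagates alignment
   knowledge: for a hypothetical expression x * 4 + y * 8 with nothing known
   about x and y, the MULT_EXPR cases give tree_ctz of 2 and 3 respectively,
   and the PLUS_EXPR case returns MIN (2, 3) == 2, i.e. the sum is known to
   be a multiple of 4.  */
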
2297 /* Return 1 if EXPR is the real constant zero in real, complex or vector form.
2298 Trailing zeroes matter for decimal float constants, so don't return 1 for them. */
2299
2300 int
2301 real_zerop (const_tree expr)
2302 {
2303 STRIP_NOPS (expr);
2304
2305 switch (TREE_CODE (expr))
2306 {
2307 case REAL_CST:
2308 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2309 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2310 case COMPLEX_CST:
2311 return real_zerop (TREE_REALPART (expr))
2312 && real_zerop (TREE_IMAGPART (expr));
2313 case VECTOR_CST:
2314 {
2315 unsigned i;
2316 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2317 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2318 return false;
2319 return true;
2320 }
2321 default:
2322 return false;
2323 }
2324 }
2325
2326 /* Return 1 if EXPR is the real constant one in real or complex form.
2327 Trailing zeroes matter for decimal float constants, so don't return
2328 1 for them. */
2329
2330 int
2331 real_onep (const_tree expr)
2332 {
2333 STRIP_NOPS (expr);
2334
2335 switch (TREE_CODE (expr))
2336 {
2337 case REAL_CST:
2338 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2339 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2340 case COMPLEX_CST:
2341 return real_onep (TREE_REALPART (expr))
2342 && real_zerop (TREE_IMAGPART (expr));
2343 case VECTOR_CST:
2344 {
2345 unsigned i;
2346 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2347 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2348 return false;
2349 return true;
2350 }
2351 default:
2352 return false;
2353 }
2354 }
2355
2356 /* Return 1 if EXPR is the real constant minus one in real, complex or vector
2357 form.  Trailing zeroes matter for decimal floats, so don't return 1 for them. */
2358
2359 int
2360 real_minus_onep (const_tree expr)
2361 {
2362 STRIP_NOPS (expr);
2363
2364 switch (TREE_CODE (expr))
2365 {
2366 case REAL_CST:
2367 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2368 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2369 case COMPLEX_CST:
2370 return real_minus_onep (TREE_REALPART (expr))
2371 && real_zerop (TREE_IMAGPART (expr));
2372 case VECTOR_CST:
2373 {
2374 unsigned i;
2375 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2376 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2377 return false;
2378 return true;
2379 }
2380 default:
2381 return false;
2382 }
2383 }
2384
2385 /* Nonzero if EXP is a constant or a cast of a constant. */
2386
2387 int
2388 really_constant_p (const_tree exp)
2389 {
2390 /* This is not quite the same as STRIP_NOPS. It does more. */
2391 while (CONVERT_EXPR_P (exp)
2392 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2393 exp = TREE_OPERAND (exp, 0);
2394 return TREE_CONSTANT (exp);
2395 }
2396 \f
2397 /* Return first list element whose TREE_VALUE is ELEM.
2398 Return 0 if ELEM is not in LIST. */
2399
2400 tree
2401 value_member (tree elem, tree list)
2402 {
2403 while (list)
2404 {
2405 if (elem == TREE_VALUE (list))
2406 return list;
2407 list = TREE_CHAIN (list);
2408 }
2409 return NULL_TREE;
2410 }
2411
2412 /* Return first list element whose TREE_PURPOSE is ELEM.
2413 Return 0 if ELEM is not in LIST. */
2414
2415 tree
2416 purpose_member (const_tree elem, tree list)
2417 {
2418 while (list)
2419 {
2420 if (elem == TREE_PURPOSE (list))
2421 return list;
2422 list = TREE_CHAIN (list);
2423 }
2424 return NULL_TREE;
2425 }
2426
2427 /* Return true if ELEM is in V. */
2428
2429 bool
2430 vec_member (const_tree elem, vec<tree, va_gc> *v)
2431 {
2432 unsigned ix;
2433 tree t;
2434 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2435 if (elem == t)
2436 return true;
2437 return false;
2438 }
2439
2440 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2441 NULL_TREE if the chain has fewer than IDX + 1 elements. */
2442
2443 tree
2444 chain_index (int idx, tree chain)
2445 {
2446 for (; chain && idx > 0; --idx)
2447 chain = TREE_CHAIN (chain);
2448 return chain;
2449 }
2450
2451 /* Return nonzero if ELEM is part of the chain CHAIN. */
2452
2453 int
2454 chain_member (const_tree elem, const_tree chain)
2455 {
2456 while (chain)
2457 {
2458 if (elem == chain)
2459 return 1;
2460 chain = DECL_CHAIN (chain);
2461 }
2462
2463 return 0;
2464 }
2465
2466 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2467 We expect a null pointer to mark the end of the chain.
2468 This is the Lisp primitive `length'. */
2469
2470 int
2471 list_length (const_tree t)
2472 {
2473 const_tree p = t;
2474 #ifdef ENABLE_TREE_CHECKING
2475 const_tree q = t;
2476 #endif
2477 int len = 0;
2478
2479 while (p)
2480 {
2481 p = TREE_CHAIN (p);
2482 #ifdef ENABLE_TREE_CHECKING
2483 if (len % 2)
2484 q = TREE_CHAIN (q);
2485 gcc_assert (p != q);
2486 #endif
2487 len++;
2488 }
2489
2490 return len;
2491 }
2492
2493 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2494 UNION_TYPE TYPE, or NULL_TREE if none. */
2495
2496 tree
2497 first_field (const_tree type)
2498 {
2499 tree t = TYPE_FIELDS (type);
2500 while (t && TREE_CODE (t) != FIELD_DECL)
2501 t = TREE_CHAIN (t);
2502 return t;
2503 }
2504
2505 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2506 by modifying the last node in chain 1 to point to chain 2.
2507 This is the Lisp primitive `nconc'. */
2508
2509 tree
2510 chainon (tree op1, tree op2)
2511 {
2512 tree t1;
2513
2514 if (!op1)
2515 return op2;
2516 if (!op2)
2517 return op1;
2518
2519 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2520 continue;
2521 TREE_CHAIN (t1) = op2;
2522
2523 #ifdef ENABLE_TREE_CHECKING
2524 {
2525 tree t2;
2526 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2527 gcc_assert (t2 != t1);
2528 }
2529 #endif
2530
2531 return op1;
2532 }
2533
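/* Illustrative sketch with two hypothetical chains a1->a2 and b1->b2 linked
   through TREE_CHAIN:

       tree c = chainon (a1, b1);

   yields c == a1 with the combined chain a1->a2->b1->b2, so
   list_length (c) == 4; nreverse (see below) would then return b2 with all
   the links reversed in place.  */
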
2534 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2535
2536 tree
2537 tree_last (tree chain)
2538 {
2539 tree next;
2540 if (chain)
2541 while ((next = TREE_CHAIN (chain)))
2542 chain = next;
2543 return chain;
2544 }
2545
2546 /* Reverse the order of elements in the chain T,
2547 and return the new head of the chain (old last element). */
2548
2549 tree
2550 nreverse (tree t)
2551 {
2552 tree prev = 0, decl, next;
2553 for (decl = t; decl; decl = next)
2554 {
2555 /* We shouldn't be using this function to reverse BLOCK chains; we
2556 have blocks_nreverse for that. */
2557 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2558 next = TREE_CHAIN (decl);
2559 TREE_CHAIN (decl) = prev;
2560 prev = decl;
2561 }
2562 return prev;
2563 }
2564 \f
2565 /* Return a newly created TREE_LIST node whose
2566 purpose and value fields are PARM and VALUE. */
2567
2568 tree
2569 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2570 {
2571 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2572 TREE_PURPOSE (t) = parm;
2573 TREE_VALUE (t) = value;
2574 return t;
2575 }
2576
2577 /* Build a chain of TREE_LIST nodes from a vector. */
2578
2579 tree
2580 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2581 {
2582 tree ret = NULL_TREE;
2583 tree *pp = &ret;
2584 unsigned int i;
2585 tree t;
2586 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2587 {
2588 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2589 pp = &TREE_CHAIN (*pp);
2590 }
2591 return ret;
2592 }
2593
2594 /* Return a newly created TREE_LIST node whose
2595 purpose and value fields are PURPOSE and VALUE
2596 and whose TREE_CHAIN is CHAIN. */
2597
2598 tree
2599 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2600 {
2601 tree node;
2602
2603 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2604 memset (node, 0, sizeof (struct tree_common));
2605
2606 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2607
2608 TREE_SET_CODE (node, TREE_LIST);
2609 TREE_CHAIN (node) = chain;
2610 TREE_PURPOSE (node) = purpose;
2611 TREE_VALUE (node) = value;
2612 return node;
2613 }
2614
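/* Illustrative sketch: a two-element list holding hypothetical trees t1 and
   t2 can be built by consing onto NULL_TREE,

       tree l = tree_cons (NULL_TREE, t1,
                           tree_cons (NULL_TREE, t2, NULL_TREE));

   after which TREE_VALUE (l) == t1 and TREE_VALUE (TREE_CHAIN (l)) == t2,
   with the TREE_PURPOSE slots left empty.  */
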
2615 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2616 trees. */
2617
2618 vec<tree, va_gc> *
2619 ctor_to_vec (tree ctor)
2620 {
2621 vec<tree, va_gc> *vec;
2622 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2623 unsigned int ix;
2624 tree val;
2625
2626 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2627 vec->quick_push (val);
2628
2629 return vec;
2630 }
2631 \f
2632 /* Return the size nominally occupied by an object of type TYPE
2633 when it resides in memory. The value is measured in units of bytes,
2634 and its data type is that normally used for type sizes
2635 (which is the first type created by make_signed_type or
2636 make_unsigned_type). */
2637
2638 tree
2639 size_in_bytes (const_tree type)
2640 {
2641 tree t;
2642
2643 if (type == error_mark_node)
2644 return integer_zero_node;
2645
2646 type = TYPE_MAIN_VARIANT (type);
2647 t = TYPE_SIZE_UNIT (type);
2648
2649 if (t == 0)
2650 {
2651 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2652 return size_zero_node;
2653 }
2654
2655 return t;
2656 }
2657
2658 /* Return the size of TYPE (in bytes) as a wide integer
2659 or return -1 if the size can vary or is larger than an integer. */
2660
2661 HOST_WIDE_INT
2662 int_size_in_bytes (const_tree type)
2663 {
2664 tree t;
2665
2666 if (type == error_mark_node)
2667 return 0;
2668
2669 type = TYPE_MAIN_VARIANT (type);
2670 t = TYPE_SIZE_UNIT (type);
2671
2672 if (t && cst_fits_uhwi_p (t))
2673 return TREE_INT_CST_LOW (t);
2674 else
2675 return -1;
2676 }
2677
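/* Illustrative sketch: on a typical target where int occupies four bytes,

       int_size_in_bytes (integer_type_node) == 4

   whereas for an incomplete type or a variable-length array TYPE_SIZE_UNIT
   is missing or non-constant and the function returns -1.  */
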
2678 /* Return the maximum size of TYPE (in bytes) as a wide integer
2679 or return -1 if the size can vary or is larger than an integer. */
2680
2681 HOST_WIDE_INT
2682 max_int_size_in_bytes (const_tree type)
2683 {
2684 HOST_WIDE_INT size = -1;
2685 tree size_tree;
2686
2687 /* If this is an array type, check for a possible MAX_SIZE attached. */
2688
2689 if (TREE_CODE (type) == ARRAY_TYPE)
2690 {
2691 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2692
2693 if (size_tree && tree_fits_uhwi_p (size_tree))
2694 size = tree_to_uhwi (size_tree);
2695 }
2696
2697 /* If we still haven't been able to get a size, see if the language
2698 can compute a maximum size. */
2699
2700 if (size == -1)
2701 {
2702 size_tree = lang_hooks.types.max_size (type);
2703
2704 if (size_tree && tree_fits_uhwi_p (size_tree))
2705 size = tree_to_uhwi (size_tree);
2706 }
2707
2708 return size;
2709 }
2710 \f
2711 /* Return the bit position of FIELD, in bits from the start of the record.
2712 This is a tree of type bitsizetype. */
2713
2714 tree
2715 bit_position (const_tree field)
2716 {
2717 return bit_from_pos (DECL_FIELD_OFFSET (field),
2718 DECL_FIELD_BIT_OFFSET (field));
2719 }
2720
2721 /* Likewise, but return as an integer.  It must be representable in
2722 that way (since it could be a signed value, we don't have the
2723 option of returning -1 like int_size_in_bytes can). */
2724
2725 HOST_WIDE_INT
2726 int_bit_position (const_tree field)
2727 {
2728 return tree_to_shwi (bit_position (field));
2729 }
2730 \f
2731 /* Return the byte position of FIELD, in bytes from the start of the record.
2732 This is a tree of type sizetype. */
2733
2734 tree
2735 byte_position (const_tree field)
2736 {
2737 return byte_from_pos (DECL_FIELD_OFFSET (field),
2738 DECL_FIELD_BIT_OFFSET (field));
2739 }
2740
2741 /* Likewise, but return as an integer.  It must be representable in
2742 that way (since it could be a signed value, we don't have the
2743 option of returning -1 like int_size_in_bytes can). */
2744
2745 HOST_WIDE_INT
2746 int_byte_position (const_tree field)
2747 {
2748 return tree_to_shwi (byte_position (field));
2749 }
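
/* Illustrative sketch, assuming the usual BITS_PER_UNIT of 8: for a
   hypothetical bit-field laid out 4 bytes plus 3 bits into its record,
   DECL_FIELD_OFFSET is 4 and DECL_FIELD_BIT_OFFSET is 3, so
   int_bit_position gives 35 while int_byte_position gives 4, i.e. the byte
   position truncates to the containing byte.  */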
2750 \f
2751 /* Return the strictest alignment, in bits, that T is known to have. */
2752
2753 unsigned int
2754 expr_align (const_tree t)
2755 {
2756 unsigned int align0, align1;
2757
2758 switch (TREE_CODE (t))
2759 {
2760 CASE_CONVERT: case NON_LVALUE_EXPR:
2761 /* If we have conversions, we know that the alignment of the
2762 object must meet each of the alignments of the types. */
2763 align0 = expr_align (TREE_OPERAND (t, 0));
2764 align1 = TYPE_ALIGN (TREE_TYPE (t));
2765 return MAX (align0, align1);
2766
2767 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2768 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2769 case CLEANUP_POINT_EXPR:
2770 /* These don't change the alignment of an object. */
2771 return expr_align (TREE_OPERAND (t, 0));
2772
2773 case COND_EXPR:
2774 /* The best we can do is say that the alignment is the least aligned
2775 of the two arms. */
2776 align0 = expr_align (TREE_OPERAND (t, 1));
2777 align1 = expr_align (TREE_OPERAND (t, 2));
2778 return MIN (align0, align1);
2779
2780 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2781 meaningfully, it's always 1. */
2782 case LABEL_DECL: case CONST_DECL:
2783 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2784 case FUNCTION_DECL:
2785 gcc_assert (DECL_ALIGN (t) != 0);
2786 return DECL_ALIGN (t);
2787
2788 default:
2789 break;
2790 }
2791
2792 /* Otherwise take the alignment from that of the type. */
2793 return TYPE_ALIGN (TREE_TYPE (t));
2794 }
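
/* Illustrative sketch: for a hypothetical VAR_DECL declared with 16-byte
   alignment, expr_align returns 128 bits, and wrapping that decl in a
   conversion to a less aligned type still yields 128, because the
   conversion case above takes the MAX of the object's and the type's
   alignment.  */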
2795 \f
2796 /* Return, as a tree node, the number of elements for TYPE (which is an
2797 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2798
2799 tree
2800 array_type_nelts (const_tree type)
2801 {
2802 tree index_type, min, max;
2803
2804 /* If they did it with unspecified bounds, then we should have already
2805 given an error about it before we got here. */
2806 if (! TYPE_DOMAIN (type))
2807 return error_mark_node;
2808
2809 index_type = TYPE_DOMAIN (type);
2810 min = TYPE_MIN_VALUE (index_type);
2811 max = TYPE_MAX_VALUE (index_type);
2812
2813 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2814 if (!max)
2815 return error_mark_node;
2816
2817 return (integer_zerop (min)
2818 ? max
2819 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2820 }
2821 \f
2822 /* If arg is static -- a reference to an object in static storage -- then
2823 return the object. This is not the same as the C meaning of `static'.
2824 If arg isn't static, return NULL. */
2825
2826 tree
2827 staticp (tree arg)
2828 {
2829 switch (TREE_CODE (arg))
2830 {
2831 case FUNCTION_DECL:
2832 /* Nested functions are static, even though taking their address will
2833 involve a trampoline as we unnest the nested function and create
2834 the trampoline on the tree level. */
2835 return arg;
2836
2837 case VAR_DECL:
2838 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2839 && ! DECL_THREAD_LOCAL_P (arg)
2840 && ! DECL_DLLIMPORT_P (arg)
2841 ? arg : NULL);
2842
2843 case CONST_DECL:
2844 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2845 ? arg : NULL);
2846
2847 case CONSTRUCTOR:
2848 return TREE_STATIC (arg) ? arg : NULL;
2849
2850 case LABEL_DECL:
2851 case STRING_CST:
2852 return arg;
2853
2854 case COMPONENT_REF:
2855 /* If the thing being referenced is not a field, then it is
2856 something language specific. */
2857 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2858
2859 /* If we are referencing a bitfield, we can't evaluate an
2860 ADDR_EXPR at compile time and so it isn't a constant. */
2861 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2862 return NULL;
2863
2864 return staticp (TREE_OPERAND (arg, 0));
2865
2866 case BIT_FIELD_REF:
2867 return NULL;
2868
2869 case INDIRECT_REF:
2870 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2871
2872 case ARRAY_REF:
2873 case ARRAY_RANGE_REF:
2874 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2875 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2876 return staticp (TREE_OPERAND (arg, 0));
2877 else
2878 return NULL;
2879
2880 case COMPOUND_LITERAL_EXPR:
2881 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2882
2883 default:
2884 return NULL;
2885 }
2886 }
2887
2888 \f
2889
2890
2891 /* Return whether OP is a DECL whose address is function-invariant. */
2892
2893 bool
2894 decl_address_invariant_p (const_tree op)
2895 {
2896 /* The conditions below are slightly less strict than the one in
2897 staticp. */
2898
2899 switch (TREE_CODE (op))
2900 {
2901 case PARM_DECL:
2902 case RESULT_DECL:
2903 case LABEL_DECL:
2904 case FUNCTION_DECL:
2905 return true;
2906
2907 case VAR_DECL:
2908 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2909 || DECL_THREAD_LOCAL_P (op)
2910 || DECL_CONTEXT (op) == current_function_decl
2911 || decl_function_context (op) == current_function_decl)
2912 return true;
2913 break;
2914
2915 case CONST_DECL:
2916 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2917 || decl_function_context (op) == current_function_decl)
2918 return true;
2919 break;
2920
2921 default:
2922 break;
2923 }
2924
2925 return false;
2926 }
2927
2928 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
2929
2930 bool
2931 decl_address_ip_invariant_p (const_tree op)
2932 {
2933 /* The conditions below are slightly less strict than the one in
2934 staticp. */
2935
2936 switch (TREE_CODE (op))
2937 {
2938 case LABEL_DECL:
2939 case FUNCTION_DECL:
2940 case STRING_CST:
2941 return true;
2942
2943 case VAR_DECL:
2944 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
2945 && !DECL_DLLIMPORT_P (op))
2946 || DECL_THREAD_LOCAL_P (op))
2947 return true;
2948 break;
2949
2950 case CONST_DECL:
2951 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
2952 return true;
2953 break;
2954
2955 default:
2956 break;
2957 }
2958
2959 return false;
2960 }
2961
2962
2963 /* Return true if T is function-invariant (internal function, does
2964 not handle arithmetic; that's handled in skip_simple_arithmetic and
2965 tree_invariant_p). */
2966
2967 static bool tree_invariant_p (tree t);
2968
2969 static bool
2970 tree_invariant_p_1 (tree t)
2971 {
2972 tree op;
2973
2974 if (TREE_CONSTANT (t)
2975 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
2976 return true;
2977
2978 switch (TREE_CODE (t))
2979 {
2980 case SAVE_EXPR:
2981 return true;
2982
2983 case ADDR_EXPR:
2984 op = TREE_OPERAND (t, 0);
2985 while (handled_component_p (op))
2986 {
2987 switch (TREE_CODE (op))
2988 {
2989 case ARRAY_REF:
2990 case ARRAY_RANGE_REF:
2991 if (!tree_invariant_p (TREE_OPERAND (op, 1))
2992 || TREE_OPERAND (op, 2) != NULL_TREE
2993 || TREE_OPERAND (op, 3) != NULL_TREE)
2994 return false;
2995 break;
2996
2997 case COMPONENT_REF:
2998 if (TREE_OPERAND (op, 2) != NULL_TREE)
2999 return false;
3000 break;
3001
3002 default:;
3003 }
3004 op = TREE_OPERAND (op, 0);
3005 }
3006
3007 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3008
3009 default:
3010 break;
3011 }
3012
3013 return false;
3014 }
3015
3016 /* Return true if T is function-invariant. */
3017
3018 static bool
3019 tree_invariant_p (tree t)
3020 {
3021 tree inner = skip_simple_arithmetic (t);
3022 return tree_invariant_p_1 (inner);
3023 }
3024
3025 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3026 Do this to any expression which may be used in more than one place,
3027 but must be evaluated only once.
3028
3029 Normally, expand_expr would reevaluate the expression each time.
3030 Calling save_expr produces something that is evaluated and recorded
3031 the first time expand_expr is called on it. Subsequent calls to
3032 expand_expr just reuse the recorded value.
3033
3034 The call to expand_expr that generates code that actually computes
3035 the value is the first call *at compile time*. Subsequent calls
3036 *at compile time* generate code to use the saved value.
3037 This produces correct result provided that *at run time* control
3038 always flows through the insns made by the first expand_expr
3039 before reaching the other places where the save_expr was evaluated.
3040 You, the caller of save_expr, must make sure this is so.
3041
3042 Constants, and certain read-only nodes, are returned with no
3043 SAVE_EXPR because that is safe. Expressions containing placeholders
3044 are not touched; see tree.def for an explanation of what these
3045 are used for. */
3046
3047 tree
3048 save_expr (tree expr)
3049 {
3050 tree t = fold (expr);
3051 tree inner;
3052
3053 /* If the tree evaluates to a constant, then we don't want to hide that
3054 fact (i.e. this allows further folding, and direct checks for constants).
3055 However, a read-only object that has side effects cannot be bypassed.
3056 Since it is no problem to reevaluate literals, we just return the
3057 literal node. */
3058 inner = skip_simple_arithmetic (t);
3059 if (TREE_CODE (inner) == ERROR_MARK)
3060 return inner;
3061
3062 if (tree_invariant_p_1 (inner))
3063 return t;
3064
3065 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3066 it means that the size or offset of some field of an object depends on
3067 the value within another field.
3068
3069 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3070 and some variable since it would then need to be both evaluated once and
3071 evaluated more than once. Front-ends must assure this case cannot
3072 happen by surrounding any such subexpressions in their own SAVE_EXPR
3073 and forcing evaluation at the proper time. */
3074 if (contains_placeholder_p (inner))
3075 return t;
3076
3077 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3078 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3079
3080 /* This expression might be placed ahead of a jump to ensure that the
3081 value was computed on both sides of the jump. So make sure it isn't
3082 eliminated as dead. */
3083 TREE_SIDE_EFFECTS (t) = 1;
3084 return t;
3085 }
3086
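/* Illustrative sketch: a front end expanding, say, an absolute value of a
   call as "c > 0 ? c : -c" without calling the function twice can wrap the
   call once,

       tree c = save_expr (call);

   and reuse C in the condition and both arms; the call is evaluated only
   where the SAVE_EXPR is first expanded.  Constants and other invariant
   operands come back unchanged, as described above.  */
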
3087 /* Look inside EXPR into any simple arithmetic operations. Return the
3088 outermost non-arithmetic or non-invariant node. */
3089
3090 tree
3091 skip_simple_arithmetic (tree expr)
3092 {
3093 /* We don't care about whether this can be used as an lvalue in this
3094 context. */
3095 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3096 expr = TREE_OPERAND (expr, 0);
3097
3098 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3099 a constant, it will be more efficient to not make another SAVE_EXPR since
3100 it will allow better simplification and GCSE will be able to merge the
3101 computations if they actually occur. */
3102 while (true)
3103 {
3104 if (UNARY_CLASS_P (expr))
3105 expr = TREE_OPERAND (expr, 0);
3106 else if (BINARY_CLASS_P (expr))
3107 {
3108 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3109 expr = TREE_OPERAND (expr, 0);
3110 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3111 expr = TREE_OPERAND (expr, 1);
3112 else
3113 break;
3114 }
3115 else
3116 break;
3117 }
3118
3119 return expr;
3120 }
3121
3122 /* Look inside EXPR into simple arithmetic operations involving constants.
3123 Return the outermost non-arithmetic or non-constant node. */
3124
3125 tree
3126 skip_simple_constant_arithmetic (tree expr)
3127 {
3128 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3129 expr = TREE_OPERAND (expr, 0);
3130
3131 while (true)
3132 {
3133 if (UNARY_CLASS_P (expr))
3134 expr = TREE_OPERAND (expr, 0);
3135 else if (BINARY_CLASS_P (expr))
3136 {
3137 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3138 expr = TREE_OPERAND (expr, 0);
3139 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3140 expr = TREE_OPERAND (expr, 1);
3141 else
3142 break;
3143 }
3144 else
3145 break;
3146 }
3147
3148 return expr;
3149 }
3150
3151 /* Return which tree structure is used by T. */
3152
3153 enum tree_node_structure_enum
3154 tree_node_structure (const_tree t)
3155 {
3156 const enum tree_code code = TREE_CODE (t);
3157 return tree_node_structure_for_code (code);
3158 }
3159
3160 /* Set various status flags when building a CALL_EXPR object T. */
3161
3162 static void
3163 process_call_operands (tree t)
3164 {
3165 bool side_effects = TREE_SIDE_EFFECTS (t);
3166 bool read_only = false;
3167 int i = call_expr_flags (t);
3168
3169 /* Calls have side-effects, except those to const or pure functions. */
3170 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3171 side_effects = true;
3172 /* Propagate TREE_READONLY of arguments for const functions. */
3173 if (i & ECF_CONST)
3174 read_only = true;
3175
3176 if (!side_effects || read_only)
3177 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3178 {
3179 tree op = TREE_OPERAND (t, i);
3180 if (op && TREE_SIDE_EFFECTS (op))
3181 side_effects = true;
3182 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3183 read_only = false;
3184 }
3185
3186 TREE_SIDE_EFFECTS (t) = side_effects;
3187 TREE_READONLY (t) = read_only;
3188 }
3189 \f
3190 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3191 size or offset that depends on a field within a record. */
3192
3193 bool
3194 contains_placeholder_p (const_tree exp)
3195 {
3196 enum tree_code code;
3197
3198 if (!exp)
3199 return 0;
3200
3201 code = TREE_CODE (exp);
3202 if (code == PLACEHOLDER_EXPR)
3203 return 1;
3204
3205 switch (TREE_CODE_CLASS (code))
3206 {
3207 case tcc_reference:
3208 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3209 position computations since they will be converted into a
3210 WITH_RECORD_EXPR involving the reference, which we assume
3211 here will be valid. */
3212 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3213
3214 case tcc_exceptional:
3215 if (code == TREE_LIST)
3216 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3217 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3218 break;
3219
3220 case tcc_unary:
3221 case tcc_binary:
3222 case tcc_comparison:
3223 case tcc_expression:
3224 switch (code)
3225 {
3226 case COMPOUND_EXPR:
3227 /* Ignoring the first operand isn't quite right, but works best. */
3228 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3229
3230 case COND_EXPR:
3231 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3232 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3233 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3234
3235 case SAVE_EXPR:
3236 /* The save_expr function never wraps anything containing
3237 a PLACEHOLDER_EXPR. */
3238 return 0;
3239
3240 default:
3241 break;
3242 }
3243
3244 switch (TREE_CODE_LENGTH (code))
3245 {
3246 case 1:
3247 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3248 case 2:
3249 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3250 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3251 default:
3252 return 0;
3253 }
3254
3255 case tcc_vl_exp:
3256 switch (code)
3257 {
3258 case CALL_EXPR:
3259 {
3260 const_tree arg;
3261 const_call_expr_arg_iterator iter;
3262 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3263 if (CONTAINS_PLACEHOLDER_P (arg))
3264 return 1;
3265 return 0;
3266 }
3267 default:
3268 return 0;
3269 }
3270
3271 default:
3272 return 0;
3273 }
3274 return 0;
3275 }
3276
3277 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3278 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3279 field positions. */
3280
3281 static bool
3282 type_contains_placeholder_1 (const_tree type)
3283 {
3284 /* If the size contains a placeholder or the parent type (the component type
3285 in the case of arrays) involves a placeholder, this type does. */
3286 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3287 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3288 || (!POINTER_TYPE_P (type)
3289 && TREE_TYPE (type)
3290 && type_contains_placeholder_p (TREE_TYPE (type))))
3291 return true;
3292
3293 /* Now do type-specific checks. Note that the last part of the check above
3294 greatly limits what we have to do below. */
3295 switch (TREE_CODE (type))
3296 {
3297 case VOID_TYPE:
3298 case POINTER_BOUNDS_TYPE:
3299 case COMPLEX_TYPE:
3300 case ENUMERAL_TYPE:
3301 case BOOLEAN_TYPE:
3302 case POINTER_TYPE:
3303 case OFFSET_TYPE:
3304 case REFERENCE_TYPE:
3305 case METHOD_TYPE:
3306 case FUNCTION_TYPE:
3307 case VECTOR_TYPE:
3308 case NULLPTR_TYPE:
3309 return false;
3310
3311 case INTEGER_TYPE:
3312 case REAL_TYPE:
3313 case FIXED_POINT_TYPE:
3314 /* Here we just check the bounds. */
3315 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3316 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3317
3318 case ARRAY_TYPE:
3319 /* We have already checked the component type above, so just check the
3320 domain type. */
3321 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3322
3323 case RECORD_TYPE:
3324 case UNION_TYPE:
3325 case QUAL_UNION_TYPE:
3326 {
3327 tree field;
3328
3329 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3330 if (TREE_CODE (field) == FIELD_DECL
3331 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3332 || (TREE_CODE (type) == QUAL_UNION_TYPE
3333 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3334 || type_contains_placeholder_p (TREE_TYPE (field))))
3335 return true;
3336
3337 return false;
3338 }
3339
3340 default:
3341 gcc_unreachable ();
3342 }
3343 }
3344
3345 /* Wrapper around above function used to cache its result. */
3346
3347 bool
3348 type_contains_placeholder_p (tree type)
3349 {
3350 bool result;
3351
3352 /* If the contains_placeholder_bits field has been initialized,
3353 then we know the answer. */
3354 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3355 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3356
3357 /* Indicate that we've seen this type node, and the answer is false.
3358 This is what we want to return if we run into recursion via fields. */
3359 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3360
3361 /* Compute the real value. */
3362 result = type_contains_placeholder_1 (type);
3363
3364 /* Store the real value. */
3365 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3366
3367 return result;
3368 }
3369 \f
3370 /* Push tree EXP onto vector QUEUE if it is not already present. */
3371
3372 static void
3373 push_without_duplicates (tree exp, vec<tree> *queue)
3374 {
3375 unsigned int i;
3376 tree iter;
3377
3378 FOR_EACH_VEC_ELT (*queue, i, iter)
3379 if (simple_cst_equal (iter, exp) == 1)
3380 break;
3381
3382 if (!iter)
3383 queue->safe_push (exp);
3384 }
3385
3386 /* Given a tree EXP, find all occurrences of references to fields
3387 in a PLACEHOLDER_EXPR and place them in vector REFS without
3388 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3389 we assume here that EXP contains only arithmetic expressions
3390 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3391 argument list. */
3392
3393 void
3394 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3395 {
3396 enum tree_code code = TREE_CODE (exp);
3397 tree inner;
3398 int i;
3399
3400 /* We handle TREE_LIST and COMPONENT_REF separately. */
3401 if (code == TREE_LIST)
3402 {
3403 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3404 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3405 }
3406 else if (code == COMPONENT_REF)
3407 {
3408 for (inner = TREE_OPERAND (exp, 0);
3409 REFERENCE_CLASS_P (inner);
3410 inner = TREE_OPERAND (inner, 0))
3411 ;
3412
3413 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3414 push_without_duplicates (exp, refs);
3415 else
3416 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3417 }
3418 else
3419 switch (TREE_CODE_CLASS (code))
3420 {
3421 case tcc_constant:
3422 break;
3423
3424 case tcc_declaration:
3425 /* Variables allocated to static storage can stay. */
3426 if (!TREE_STATIC (exp))
3427 push_without_duplicates (exp, refs);
3428 break;
3429
3430 case tcc_expression:
3431 /* This is the pattern built in ada/make_aligning_type. */
3432 if (code == ADDR_EXPR
3433 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3434 {
3435 push_without_duplicates (exp, refs);
3436 break;
3437 }
3438
3439 /* Fall through... */
3440
3441 case tcc_exceptional:
3442 case tcc_unary:
3443 case tcc_binary:
3444 case tcc_comparison:
3445 case tcc_reference:
3446 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3447 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3448 break;
3449
3450 case tcc_vl_exp:
3451 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3452 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3453 break;
3454
3455 default:
3456 gcc_unreachable ();
3457 }
3458 }
3459
3460 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3461 return a tree with all occurrences of references to F in a
3462 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3463 CONST_DECLs. Note that we assume here that EXP contains only
3464 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3465 occurring only in their argument list. */
3466
3467 tree
3468 substitute_in_expr (tree exp, tree f, tree r)
3469 {
3470 enum tree_code code = TREE_CODE (exp);
3471 tree op0, op1, op2, op3;
3472 tree new_tree;
3473
3474 /* We handle TREE_LIST and COMPONENT_REF separately. */
3475 if (code == TREE_LIST)
3476 {
3477 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3478 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3479 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3480 return exp;
3481
3482 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3483 }
3484 else if (code == COMPONENT_REF)
3485 {
3486 tree inner;
3487
3488 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3489 and it is the right field, replace it with R. */
3490 for (inner = TREE_OPERAND (exp, 0);
3491 REFERENCE_CLASS_P (inner);
3492 inner = TREE_OPERAND (inner, 0))
3493 ;
3494
3495 /* The field. */
3496 op1 = TREE_OPERAND (exp, 1);
3497
3498 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3499 return r;
3500
3501 /* If this expression hasn't been completed yet, leave it alone. */
3502 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3503 return exp;
3504
3505 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3506 if (op0 == TREE_OPERAND (exp, 0))
3507 return exp;
3508
3509 new_tree
3510 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3511 }
3512 else
3513 switch (TREE_CODE_CLASS (code))
3514 {
3515 case tcc_constant:
3516 return exp;
3517
3518 case tcc_declaration:
3519 if (exp == f)
3520 return r;
3521 else
3522 return exp;
3523
3524 case tcc_expression:
3525 if (exp == f)
3526 return r;
3527
3528 /* Fall through... */
3529
3530 case tcc_exceptional:
3531 case tcc_unary:
3532 case tcc_binary:
3533 case tcc_comparison:
3534 case tcc_reference:
3535 switch (TREE_CODE_LENGTH (code))
3536 {
3537 case 0:
3538 return exp;
3539
3540 case 1:
3541 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3542 if (op0 == TREE_OPERAND (exp, 0))
3543 return exp;
3544
3545 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3546 break;
3547
3548 case 2:
3549 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3550 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3551
3552 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3553 return exp;
3554
3555 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3556 break;
3557
3558 case 3:
3559 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3560 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3561 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3562
3563 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3564 && op2 == TREE_OPERAND (exp, 2))
3565 return exp;
3566
3567 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3568 break;
3569
3570 case 4:
3571 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3572 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3573 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3574 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3575
3576 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3577 && op2 == TREE_OPERAND (exp, 2)
3578 && op3 == TREE_OPERAND (exp, 3))
3579 return exp;
3580
3581 new_tree
3582 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3583 break;
3584
3585 default:
3586 gcc_unreachable ();
3587 }
3588 break;
3589
3590 case tcc_vl_exp:
3591 {
3592 int i;
3593
3594 new_tree = NULL_TREE;
3595
3596 /* If we are trying to replace F with a constant, inline back
3597 functions that do nothing other than compute a value from
3598 the arguments they are passed.  This makes it possible to
3599 partially or entirely fold the replacement expression. */
3600 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3601 {
3602 tree t = maybe_inline_call_in_expr (exp);
3603 if (t)
3604 return SUBSTITUTE_IN_EXPR (t, f, r);
3605 }
3606
3607 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3608 {
3609 tree op = TREE_OPERAND (exp, i);
3610 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3611 if (new_op != op)
3612 {
3613 if (!new_tree)
3614 new_tree = copy_node (exp);
3615 TREE_OPERAND (new_tree, i) = new_op;
3616 }
3617 }
3618
3619 if (new_tree)
3620 {
3621 new_tree = fold (new_tree);
3622 if (TREE_CODE (new_tree) == CALL_EXPR)
3623 process_call_operands (new_tree);
3624 }
3625 else
3626 return exp;
3627 }
3628 break;
3629
3630 default:
3631 gcc_unreachable ();
3632 }
3633
3634 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3635
3636 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3637 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3638
3639 return new_tree;
3640 }
3641
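/* Illustrative sketch: for a self-referential size expression such as
   PLACEHOLDER_EXPR.len * 4, with LEN a hypothetical FIELD_DECL of the
   record,

       substitute_in_expr (size, len_field, build_int_cst (sizetype, 10))

   replaces the COMPONENT_REF of the placeholder by 10 and folds the whole
   expression down to the constant 40 (SIZE and LEN_FIELD are hypothetical
   names).  */
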
3642 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3643 for it within OBJ, a tree that is an object or a chain of references. */
3644
3645 tree
3646 substitute_placeholder_in_expr (tree exp, tree obj)
3647 {
3648 enum tree_code code = TREE_CODE (exp);
3649 tree op0, op1, op2, op3;
3650 tree new_tree;
3651
3652 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3653 in the chain of OBJ. */
3654 if (code == PLACEHOLDER_EXPR)
3655 {
3656 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3657 tree elt;
3658
3659 for (elt = obj; elt != 0;
3660 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3661 || TREE_CODE (elt) == COND_EXPR)
3662 ? TREE_OPERAND (elt, 1)
3663 : (REFERENCE_CLASS_P (elt)
3664 || UNARY_CLASS_P (elt)
3665 || BINARY_CLASS_P (elt)
3666 || VL_EXP_CLASS_P (elt)
3667 || EXPRESSION_CLASS_P (elt))
3668 ? TREE_OPERAND (elt, 0) : 0))
3669 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3670 return elt;
3671
3672 for (elt = obj; elt != 0;
3673 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3674 || TREE_CODE (elt) == COND_EXPR)
3675 ? TREE_OPERAND (elt, 1)
3676 : (REFERENCE_CLASS_P (elt)
3677 || UNARY_CLASS_P (elt)
3678 || BINARY_CLASS_P (elt)
3679 || VL_EXP_CLASS_P (elt)
3680 || EXPRESSION_CLASS_P (elt))
3681 ? TREE_OPERAND (elt, 0) : 0))
3682 if (POINTER_TYPE_P (TREE_TYPE (elt))
3683 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3684 == need_type))
3685 return fold_build1 (INDIRECT_REF, need_type, elt);
3686
3687 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3688 survives until RTL generation, there will be an error. */
3689 return exp;
3690 }
3691
3692 /* TREE_LIST is special because we need to look at TREE_VALUE
3693 and TREE_CHAIN, not TREE_OPERANDS. */
3694 else if (code == TREE_LIST)
3695 {
3696 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3697 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3698 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3699 return exp;
3700
3701 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3702 }
3703 else
3704 switch (TREE_CODE_CLASS (code))
3705 {
3706 case tcc_constant:
3707 case tcc_declaration:
3708 return exp;
3709
3710 case tcc_exceptional:
3711 case tcc_unary:
3712 case tcc_binary:
3713 case tcc_comparison:
3714 case tcc_expression:
3715 case tcc_reference:
3716 case tcc_statement:
3717 switch (TREE_CODE_LENGTH (code))
3718 {
3719 case 0:
3720 return exp;
3721
3722 case 1:
3723 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3724 if (op0 == TREE_OPERAND (exp, 0))
3725 return exp;
3726
3727 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3728 break;
3729
3730 case 2:
3731 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3732 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3733
3734 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3735 return exp;
3736
3737 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3738 break;
3739
3740 case 3:
3741 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3742 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3743 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3744
3745 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3746 && op2 == TREE_OPERAND (exp, 2))
3747 return exp;
3748
3749 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3750 break;
3751
3752 case 4:
3753 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3754 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3755 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3756 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3757
3758 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3759 && op2 == TREE_OPERAND (exp, 2)
3760 && op3 == TREE_OPERAND (exp, 3))
3761 return exp;
3762
3763 new_tree
3764 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3765 break;
3766
3767 default:
3768 gcc_unreachable ();
3769 }
3770 break;
3771
3772 case tcc_vl_exp:
3773 {
3774 int i;
3775
3776 new_tree = NULL_TREE;
3777
3778 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3779 {
3780 tree op = TREE_OPERAND (exp, i);
3781 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3782 if (new_op != op)
3783 {
3784 if (!new_tree)
3785 new_tree = copy_node (exp);
3786 TREE_OPERAND (new_tree, i) = new_op;
3787 }
3788 }
3789
3790 if (new_tree)
3791 {
3792 new_tree = fold (new_tree);
3793 if (TREE_CODE (new_tree) == CALL_EXPR)
3794 process_call_operands (new_tree);
3795 }
3796 else
3797 return exp;
3798 }
3799 break;
3800
3801 default:
3802 gcc_unreachable ();
3803 }
3804
3805 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3806
3807 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3808 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3809
3810 return new_tree;
3811 }
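
/* Illustrative sketch: if EXP is the self-referential bound
   PLACEHOLDER_EXPR.last of an array type and OBJ is a COMPONENT_REF
   denoting a particular record object of the matching type, then
   SUBSTITUTE_PLACEHOLDER_IN_EXPR (exp, obj) rewrites the bound as obj.last,
   i.e. the placeholder is replaced by the object whose type matches it
   (LAST is a hypothetical field name).  */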
3812 \f
3813
3814 /* Subroutine of stabilize_reference; this is called for subtrees of
3815 references. Any expression with side-effects must be put in a SAVE_EXPR
3816 to ensure that it is only evaluated once.
3817
3818 We don't put SAVE_EXPR nodes around everything, because assigning very
3819 simple expressions to temporaries causes us to miss good opportunities
3820 for optimizations. Among other things, the opportunity to fold in the
3821 addition of a constant into an addressing mode often gets lost, e.g.
3822 "y[i+1] += x;". In general, we take the approach that we should not make
3823 an assignment unless we are forced into it - i.e., that any non-side effect
3824 operator should be allowed, and that cse should take care of coalescing
3825 multiple utterances of the same expression should that prove fruitful. */
3826
3827 static tree
3828 stabilize_reference_1 (tree e)
3829 {
3830 tree result;
3831 enum tree_code code = TREE_CODE (e);
3832
3833 /* We cannot ignore const expressions because they might be references
3834 to a const array whose index contains side-effects.  But we can
3835 ignore things that are actual constants or that have already been
3836 handled by this function. */
3837
3838 if (tree_invariant_p (e))
3839 return e;
3840
3841 switch (TREE_CODE_CLASS (code))
3842 {
3843 case tcc_exceptional:
3844 case tcc_type:
3845 case tcc_declaration:
3846 case tcc_comparison:
3847 case tcc_statement:
3848 case tcc_expression:
3849 case tcc_reference:
3850 case tcc_vl_exp:
3851 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3852 so that it will only be evaluated once. */
3853 /* The reference (r) and comparison (<) classes could be handled as
3854 below, but it is generally faster to only evaluate them once. */
3855 if (TREE_SIDE_EFFECTS (e))
3856 return save_expr (e);
3857 return e;
3858
3859 case tcc_constant:
3860 /* Constants need no processing. In fact, we should never reach
3861 here. */
3862 return e;
3863
3864 case tcc_binary:
3865 /* Division is slow and tends to be compiled with jumps,
3866 especially the division by powers of 2 that is often
3867 found inside of an array reference. So do it just once. */
3868 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3869 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3870 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3871 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3872 return save_expr (e);
3873 /* Recursively stabilize each operand. */
3874 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3875 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3876 break;
3877
3878 case tcc_unary:
3879 /* Recursively stabilize each operand. */
3880 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3881 break;
3882
3883 default:
3884 gcc_unreachable ();
3885 }
3886
3887 TREE_TYPE (result) = TREE_TYPE (e);
3888 TREE_READONLY (result) = TREE_READONLY (e);
3889 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3890 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3891
3892 return result;
3893 }
3894
3895 /* Stabilize a reference so that we can use it any number of times
3896 without causing its operands to be evaluated more than once.
3897 Returns the stabilized reference. This works by means of save_expr,
3898 so see the caveats in the comments about save_expr.
3899
3900 Also allows conversion expressions whose operands are references.
3901 Any other kind of expression is returned unchanged. */
3902
3903 tree
3904 stabilize_reference (tree ref)
3905 {
3906 tree result;
3907 enum tree_code code = TREE_CODE (ref);
3908
3909 switch (code)
3910 {
3911 case VAR_DECL:
3912 case PARM_DECL:
3913 case RESULT_DECL:
3914 /* No action is needed in this case. */
3915 return ref;
3916
3917 CASE_CONVERT:
3918 case FLOAT_EXPR:
3919 case FIX_TRUNC_EXPR:
3920 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
3921 break;
3922
3923 case INDIRECT_REF:
3924 result = build_nt (INDIRECT_REF,
3925 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
3926 break;
3927
3928 case COMPONENT_REF:
3929 result = build_nt (COMPONENT_REF,
3930 stabilize_reference (TREE_OPERAND (ref, 0)),
3931 TREE_OPERAND (ref, 1), NULL_TREE);
3932 break;
3933
3934 case BIT_FIELD_REF:
3935 result = build_nt (BIT_FIELD_REF,
3936 stabilize_reference (TREE_OPERAND (ref, 0)),
3937 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3938 break;
3939
3940 case ARRAY_REF:
3941 result = build_nt (ARRAY_REF,
3942 stabilize_reference (TREE_OPERAND (ref, 0)),
3943 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3944 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3945 break;
3946
3947 case ARRAY_RANGE_REF:
3948 result = build_nt (ARRAY_RANGE_REF,
3949 stabilize_reference (TREE_OPERAND (ref, 0)),
3950 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3951 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3952 break;
3953
3954 case COMPOUND_EXPR:
3955 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3956 it wouldn't be ignored. This matters when dealing with
3957 volatiles. */
3958 return stabilize_reference_1 (ref);
3959
3960 /* If arg isn't a kind of lvalue we recognize, make no change.
3961 Caller should recognize the error for an invalid lvalue. */
3962 default:
3963 return ref;
3964
3965 case ERROR_MARK:
3966 return error_mark_node;
3967 }
3968
3969 TREE_TYPE (result) = TREE_TYPE (ref);
3970 TREE_READONLY (result) = TREE_READONLY (ref);
3971 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
3972 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3973
3974 return result;
3975 }
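
/* Illustrative sketch: for a reference like a[i++], with A an array
   VAR_DECL, stabilize_reference rebuilds the ARRAY_REF with the index
   wrapped in a SAVE_EXPR by stabilize_reference_1, so that using the
   returned tree several times, e.g. on both sides of a compound
   assignment, increments I only once.  */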
3976 \f
3977 /* Low-level constructors for expressions. */
3978
3979 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
3980 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
3981
3982 void
3983 recompute_tree_invariant_for_addr_expr (tree t)
3984 {
3985 tree node;
3986 bool tc = true, se = false;
3987
3988 /* We started out assuming this address is both invariant and constant, but
3989 does not have side effects. Now go down any handled components and see if
3990 any of them involve offsets that are either non-constant or non-invariant.
3991 Also check for side-effects.
3992
3993 ??? Note that this code makes no attempt to deal with the case where
3994 taking the address of something causes a copy due to misalignment. */
3995
3996 #define UPDATE_FLAGS(NODE) \
3997 do { tree _node = (NODE); \
3998 if (_node && !TREE_CONSTANT (_node)) tc = false; \
3999 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4000
4001 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4002 node = TREE_OPERAND (node, 0))
4003 {
4004 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4005 array reference (probably made temporarily by the G++ front end),
4006 so ignore all the operands. */
4007 if ((TREE_CODE (node) == ARRAY_REF
4008 || TREE_CODE (node) == ARRAY_RANGE_REF)
4009 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4010 {
4011 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4012 if (TREE_OPERAND (node, 2))
4013 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4014 if (TREE_OPERAND (node, 3))
4015 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4016 }
4017 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4018 FIELD_DECL, apparently. The G++ front end can put something else
4019 there, at least temporarily. */
4020 else if (TREE_CODE (node) == COMPONENT_REF
4021 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4022 {
4023 if (TREE_OPERAND (node, 2))
4024 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4025 }
4026 }
4027
4028 node = lang_hooks.expr_to_decl (node, &tc, &se);
4029
4030 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4031 the address, since &(*a)->b is a form of addition. If it's a constant, the
4032 address is constant too. If it's a decl, its address is constant if the
4033 decl is static. Everything else is not constant and, furthermore,
4034 taking the address of a volatile variable is not volatile. */
4035 if (TREE_CODE (node) == INDIRECT_REF
4036 || TREE_CODE (node) == MEM_REF)
4037 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4038 else if (CONSTANT_CLASS_P (node))
4039 ;
4040 else if (DECL_P (node))
4041 tc &= (staticp (node) != NULL_TREE);
4042 else
4043 {
4044 tc = false;
4045 se |= TREE_SIDE_EFFECTS (node);
4046 }
4047
4048
4049 TREE_CONSTANT (t) = tc;
4050 TREE_SIDE_EFFECTS (t) = se;
4051 #undef UPDATE_FLAGS
4052 }
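
/* For example (an illustrative sketch; X_DECL stands for a file-scope
   static VAR_DECL):

     tree addr = build_fold_addr_expr (x_decl);

   ends up with TREE_CONSTANT (addr) set, because the walk above reaches
   a DECL_P node for which staticp returns non-NULL.  An address such as
   '&a[i]' with a non-constant index instead loses TREE_CONSTANT because
   UPDATE_FLAGS sees the non-constant index operand.  */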
4053
4054 /* Build an expression of code CODE, data type TYPE, and operands as
4055 specified. Expressions and reference nodes can be created this way.
4056 Constants, decls, types and misc nodes cannot be.
4057
4058 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4059 enough for all extant tree codes. */
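
/* For example (an illustrative sketch), a binary addition '1 + 2' of
   type 'int' can be built with

     tree one = build_int_cst (integer_type_node, 1);
     tree two = build_int_cst (integer_type_node, 2);
     tree sum = build2 (PLUS_EXPR, integer_type_node, one, two);

   build2 then derives TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS
   for SUM from the operands, as implemented below.  */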
4060
4061 tree
4062 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4063 {
4064 tree t;
4065
4066 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4067
4068 t = make_node_stat (code PASS_MEM_STAT);
4069 TREE_TYPE (t) = tt;
4070
4071 return t;
4072 }
4073
4074 tree
4075 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4076 {
4077 int length = sizeof (struct tree_exp);
4078 tree t;
4079
4080 record_node_allocation_statistics (code, length);
4081
4082 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4083
4084 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4085
4086 memset (t, 0, sizeof (struct tree_common));
4087
4088 TREE_SET_CODE (t, code);
4089
4090 TREE_TYPE (t) = type;
4091 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4092 TREE_OPERAND (t, 0) = node;
4093 if (node && !TYPE_P (node))
4094 {
4095 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4096 TREE_READONLY (t) = TREE_READONLY (node);
4097 }
4098
4099 if (TREE_CODE_CLASS (code) == tcc_statement)
4100 TREE_SIDE_EFFECTS (t) = 1;
4101 else switch (code)
4102 {
4103 case VA_ARG_EXPR:
4104 /* All of these have side-effects, no matter what their
4105 operands are. */
4106 TREE_SIDE_EFFECTS (t) = 1;
4107 TREE_READONLY (t) = 0;
4108 break;
4109
4110 case INDIRECT_REF:
4111 /* Whether a dereference is readonly has nothing to do with whether
4112 its operand is readonly. */
4113 TREE_READONLY (t) = 0;
4114 break;
4115
4116 case ADDR_EXPR:
4117 if (node)
4118 recompute_tree_invariant_for_addr_expr (t);
4119 break;
4120
4121 default:
4122 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4123 && node && !TYPE_P (node)
4124 && TREE_CONSTANT (node))
4125 TREE_CONSTANT (t) = 1;
4126 if (TREE_CODE_CLASS (code) == tcc_reference
4127 && node && TREE_THIS_VOLATILE (node))
4128 TREE_THIS_VOLATILE (t) = 1;
4129 break;
4130 }
4131
4132 return t;
4133 }
4134
4135 #define PROCESS_ARG(N) \
4136 do { \
4137 TREE_OPERAND (t, N) = arg##N; \
4138 if (arg##N && !TYPE_P (arg##N)) \
4139 { \
4140 if (TREE_SIDE_EFFECTS (arg##N)) \
4141 side_effects = 1; \
4142 if (!TREE_READONLY (arg##N) \
4143 && !CONSTANT_CLASS_P (arg##N)) \
4144 (void) (read_only = 0); \
4145 if (!TREE_CONSTANT (arg##N)) \
4146 (void) (constant = 0); \
4147 } \
4148 } while (0)
4149
4150 tree
4151 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4152 {
4153 bool constant, read_only, side_effects;
4154 tree t;
4155
4156 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4157
4158 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4159 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4160 /* When sizetype precision doesn't match that of pointers
4161 we need to be able to build explicit extensions or truncations
4162 of the offset argument. */
4163 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4164 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4165 && TREE_CODE (arg1) == INTEGER_CST);
4166
4167 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4168 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4169 && ptrofftype_p (TREE_TYPE (arg1)));
4170
4171 t = make_node_stat (code PASS_MEM_STAT);
4172 TREE_TYPE (t) = tt;
4173
4174 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4175 result based on those same flags for the arguments. But if the
4176 arguments aren't really even `tree' expressions, we shouldn't be trying
4177 to do this. */
4178
4179 /* Expressions without side effects may be constant if their
4180 arguments are as well. */
4181 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4182 || TREE_CODE_CLASS (code) == tcc_binary);
4183 read_only = 1;
4184 side_effects = TREE_SIDE_EFFECTS (t);
4185
4186 PROCESS_ARG (0);
4187 PROCESS_ARG (1);
4188
4189 TREE_READONLY (t) = read_only;
4190 TREE_CONSTANT (t) = constant;
4191 TREE_SIDE_EFFECTS (t) = side_effects;
4192 TREE_THIS_VOLATILE (t)
4193 = (TREE_CODE_CLASS (code) == tcc_reference
4194 && arg0 && TREE_THIS_VOLATILE (arg0));
4195
4196 return t;
4197 }
4198
4199
4200 tree
4201 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4202 tree arg2 MEM_STAT_DECL)
4203 {
4204 bool constant, read_only, side_effects;
4205 tree t;
4206
4207 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4208 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4209
4210 t = make_node_stat (code PASS_MEM_STAT);
4211 TREE_TYPE (t) = tt;
4212
4213 read_only = 1;
4214
4215 /* As a special exception, if COND_EXPR has NULL branches, we
4216 assume that it is a gimple statement and always consider
4217 it to have side effects. */
4218 if (code == COND_EXPR
4219 && tt == void_type_node
4220 && arg1 == NULL_TREE
4221 && arg2 == NULL_TREE)
4222 side_effects = true;
4223 else
4224 side_effects = TREE_SIDE_EFFECTS (t);
4225
4226 PROCESS_ARG (0);
4227 PROCESS_ARG (1);
4228 PROCESS_ARG (2);
4229
4230 if (code == COND_EXPR)
4231 TREE_READONLY (t) = read_only;
4232
4233 TREE_SIDE_EFFECTS (t) = side_effects;
4234 TREE_THIS_VOLATILE (t)
4235 = (TREE_CODE_CLASS (code) == tcc_reference
4236 && arg0 && TREE_THIS_VOLATILE (arg0));
4237
4238 return t;
4239 }
4240
4241 tree
4242 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4243 tree arg2, tree arg3 MEM_STAT_DECL)
4244 {
4245 bool constant, read_only, side_effects;
4246 tree t;
4247
4248 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4249
4250 t = make_node_stat (code PASS_MEM_STAT);
4251 TREE_TYPE (t) = tt;
4252
4253 side_effects = TREE_SIDE_EFFECTS (t);
4254
4255 PROCESS_ARG (0);
4256 PROCESS_ARG (1);
4257 PROCESS_ARG (2);
4258 PROCESS_ARG (3);
4259
4260 TREE_SIDE_EFFECTS (t) = side_effects;
4261 TREE_THIS_VOLATILE (t)
4262 = (TREE_CODE_CLASS (code) == tcc_reference
4263 && arg0 && TREE_THIS_VOLATILE (arg0));
4264
4265 return t;
4266 }
4267
4268 tree
4269 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4270 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4271 {
4272 bool constant, read_only, side_effects;
4273 tree t;
4274
4275 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4276
4277 t = make_node_stat (code PASS_MEM_STAT);
4278 TREE_TYPE (t) = tt;
4279
4280 side_effects = TREE_SIDE_EFFECTS (t);
4281
4282 PROCESS_ARG (0);
4283 PROCESS_ARG (1);
4284 PROCESS_ARG (2);
4285 PROCESS_ARG (3);
4286 PROCESS_ARG (4);
4287
4288 TREE_SIDE_EFFECTS (t) = side_effects;
4289 TREE_THIS_VOLATILE (t)
4290 = (TREE_CODE_CLASS (code) == tcc_reference
4291 && arg0 && TREE_THIS_VOLATILE (arg0));
4292
4293 return t;
4294 }
4295
4296 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4297 on the pointer PTR. */
4298
4299 tree
4300 build_simple_mem_ref_loc (location_t loc, tree ptr)
4301 {
4302 HOST_WIDE_INT offset = 0;
4303 tree ptype = TREE_TYPE (ptr);
4304 tree tem;
4305 /* For convenience allow addresses that collapse to a simple base
4306 and offset. */
4307 if (TREE_CODE (ptr) == ADDR_EXPR
4308 && (handled_component_p (TREE_OPERAND (ptr, 0))
4309 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4310 {
4311 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4312 gcc_assert (ptr);
4313 ptr = build_fold_addr_expr (ptr);
4314 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4315 }
4316 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4317 ptr, build_int_cst (ptype, offset));
4318 SET_EXPR_LOCATION (tem, loc);
4319 return tem;
4320 }
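
/* For example (an illustrative sketch; P stands for a pointer of type
   'int *'):

     tree deref = build_simple_mem_ref_loc (loc, p);

   yields MEM_REF <int> (p, 0), the equivalent of the C expression '*p'.
   If P is instead an ADDR_EXPR such as '&s.f', the address collapses to
   the base of S plus the byte offset of F.  */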
4321
4322 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4323
4324 offset_int
4325 mem_ref_offset (const_tree t)
4326 {
4327 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4328 }
4329
4330 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4331 offsetted by OFFSET units. */
4332
4333 tree
4334 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4335 {
4336 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4337 build_fold_addr_expr (base),
4338 build_int_cst (ptr_type_node, offset));
4339 tree addr = build1 (ADDR_EXPR, type, ref);
4340 recompute_tree_invariant_for_addr_expr (addr);
4341 return addr;
4342 }
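
/* For example (an illustrative sketch; BASE stands for a static
   VAR_DECL of type 'char[16]'):

     tree ptr_type = build_pointer_type (char_type_node);
     tree addr = build_invariant_address (ptr_type, base, 8);

   produces the invariant address &MEM_REF <char> (&base, 8), i.e. a
   'char *' pointing eight bytes into BASE, with TREE_CONSTANT recomputed
   by recompute_tree_invariant_for_addr_expr.  */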
4343
4344 /* Similar to the build functions above, but don't specify the TREE_TYPE
4345 and leave the TREE_SIDE_EFFECTS as 0.
4346 It is permissible for arguments to be null,
4347 or even garbage if their values do not matter. */
4348
4349 tree
4350 build_nt (enum tree_code code, ...)
4351 {
4352 tree t;
4353 int length;
4354 int i;
4355 va_list p;
4356
4357 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4358
4359 va_start (p, code);
4360
4361 t = make_node (code);
4362 length = TREE_CODE_LENGTH (code);
4363
4364 for (i = 0; i < length; i++)
4365 TREE_OPERAND (t, i) = va_arg (p, tree);
4366
4367 va_end (p);
4368 return t;
4369 }
4370
4371 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4372 tree vec. */
4373
4374 tree
4375 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4376 {
4377 tree ret, t;
4378 unsigned int ix;
4379
4380 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4381 CALL_EXPR_FN (ret) = fn;
4382 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4383 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4384 CALL_EXPR_ARG (ret, ix) = t;
4385 return ret;
4386 }
4387 \f
4388 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4389 We do NOT enter this node in any sort of symbol table.
4390
4391 LOC is the location of the decl.
4392
4393 layout_decl is used to set up the decl's storage layout.
4394 Other slots are initialized to 0 or null pointers. */
4395
4396 tree
4397 build_decl_stat (location_t loc, enum tree_code code, tree name,
4398 tree type MEM_STAT_DECL)
4399 {
4400 tree t;
4401
4402 t = make_node_stat (code PASS_MEM_STAT);
4403 DECL_SOURCE_LOCATION (t) = loc;
4404
4405 /* if (type == error_mark_node)
4406 type = integer_type_node; */
4407 /* That is not done, deliberately, so that having error_mark_node
4408 as the type can suppress useless errors in the use of this variable. */
4409
4410 DECL_NAME (t) = name;
4411 TREE_TYPE (t) = type;
4412
4413 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4414 layout_decl (t, 0);
4415
4416 return t;
4417 }
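
/* For example (an illustrative sketch; the name "tmp" is arbitrary), an
   artificial 'int' variable can be declared with

     tree var = build_decl (input_location, VAR_DECL,
			    get_identifier ("tmp"), integer_type_node);

   which also lays out its size and alignment via layout_decl.  Nothing
   is entered into any symbol table.  */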
4418
4419 /* Builds and returns a function declaration with NAME and TYPE. */
4420
4421 tree
4422 build_fn_decl (const char *name, tree type)
4423 {
4424 tree id = get_identifier (name);
4425 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4426
4427 DECL_EXTERNAL (decl) = 1;
4428 TREE_PUBLIC (decl) = 1;
4429 DECL_ARTIFICIAL (decl) = 1;
4430 TREE_NOTHROW (decl) = 1;
4431
4432 return decl;
4433 }
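
/* For example (an illustrative sketch; the name "foo" is arbitrary), a
   declaration for an external 'int foo (void)' can be obtained with

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("foo", fntype);

   The result is public, external, artificial and marked nothrow, as set
   above.  */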
4434
4435 vec<tree, va_gc> *all_translation_units;
4436
4437 /* Builds a new translation-unit decl with name NAME, queues it in the
4438 global list of translation-unit decls and returns it. */
4439
4440 tree
4441 build_translation_unit_decl (tree name)
4442 {
4443 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4444 name, NULL_TREE);
4445 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4446 vec_safe_push (all_translation_units, tu);
4447 return tu;
4448 }
4449
4450 \f
4451 /* BLOCK nodes are used to represent the structure of binding contours
4452 and declarations, once those contours have been exited and their contents
4453 compiled. This information is used for outputting debugging info. */
4454
4455 tree
4456 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4457 {
4458 tree block = make_node (BLOCK);
4459
4460 BLOCK_VARS (block) = vars;
4461 BLOCK_SUBBLOCKS (block) = subblocks;
4462 BLOCK_SUPERCONTEXT (block) = supercontext;
4463 BLOCK_CHAIN (block) = chain;
4464 return block;
4465 }
4466
4467 \f
4468 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4469
4470 LOC is the location to use in tree T. */
4471
4472 void
4473 protected_set_expr_location (tree t, location_t loc)
4474 {
4475 if (t && CAN_HAVE_LOCATION_P (t))
4476 SET_EXPR_LOCATION (t, loc);
4477 }
4478 \f
4479 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4480 is ATTRIBUTE. */
4481
4482 tree
4483 build_decl_attribute_variant (tree ddecl, tree attribute)
4484 {
4485 DECL_ATTRIBUTES (ddecl) = attribute;
4486 return ddecl;
4487 }
4488
4489 /* Borrowed from hashtab.c iterative_hash implementation. */
4490 #define mix(a,b,c) \
4491 { \
4492 a -= b; a -= c; a ^= (c>>13); \
4493 b -= c; b -= a; b ^= (a<< 8); \
4494 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4495 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4496 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4497 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4498 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4499 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4500 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4501 }
4502
4503
4504 /* Produce a good hash value combining VAL and VAL2. */
4505 hashval_t
4506 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4507 {
4508 /* the golden ratio; an arbitrary value. */
4509 hashval_t a = 0x9e3779b9;
4510
4511 mix (a, val, val2);
4512 return val2;
4513 }
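
/* For example (an illustrative sketch; HASH_A and HASH_B stand for
   previously computed hash values), several values can be folded into
   one seed by chaining calls:

     hashval_t h = 0;
     h = iterative_hash_hashval_t (hash_a, h);
     h = iterative_hash_hashval_t (hash_b, h);

   Each call mixes the new value into the running seed and returns the
   updated seed.  */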
4514
4515 /* Produce a good hash value combining VAL and VAL2. */
4516 hashval_t
4517 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4518 {
4519 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4520 return iterative_hash_hashval_t (val, val2);
4521 else
4522 {
4523 hashval_t a = (hashval_t) val;
4524 /* Avoid warnings about shifting of more than the width of the type on
4525 hosts that won't execute this path. */
4526 int zero = 0;
4527 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4528 mix (a, b, val2);
4529 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4530 {
4531 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4532 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4533 mix (a, b, val2);
4534 }
4535 return val2;
4536 }
4537 }
4538
4539 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4540 is ATTRIBUTE and its qualifiers are QUALS.
4541
4542 Record such modified types already made so we don't make duplicates. */
4543
4544 tree
4545 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4546 {
4547 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4548 {
4549 hashval_t hashcode = 0;
4550 tree ntype;
4551 int i;
4552 tree t;
4553 enum tree_code code = TREE_CODE (ttype);
4554
4555 /* Building a distinct copy of a tagged type is inappropriate; it
4556 causes breakage in code that expects there to be a one-to-one
4557 relationship between a struct and its fields.
4558 build_duplicate_type is another solution (as used in
4559 handle_transparent_union_attribute), but that doesn't play well
4560 with the stronger C++ type identity model. */
4561 if (TREE_CODE (ttype) == RECORD_TYPE
4562 || TREE_CODE (ttype) == UNION_TYPE
4563 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4564 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4565 {
4566 warning (OPT_Wattributes,
4567 "ignoring attributes applied to %qT after definition",
4568 TYPE_MAIN_VARIANT (ttype));
4569 return build_qualified_type (ttype, quals);
4570 }
4571
4572 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4573 ntype = build_distinct_type_copy (ttype);
4574
4575 TYPE_ATTRIBUTES (ntype) = attribute;
4576
4577 hashcode = iterative_hash_object (code, hashcode);
4578 if (TREE_TYPE (ntype))
4579 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4580 hashcode);
4581 hashcode = attribute_hash_list (attribute, hashcode);
4582
4583 switch (TREE_CODE (ntype))
4584 {
4585 case FUNCTION_TYPE:
4586 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4587 break;
4588 case ARRAY_TYPE:
4589 if (TYPE_DOMAIN (ntype))
4590 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4591 hashcode);
4592 break;
4593 case INTEGER_TYPE:
4594 t = TYPE_MAX_VALUE (ntype);
4595 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4596 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4597 break;
4598 case REAL_TYPE:
4599 case FIXED_POINT_TYPE:
4600 {
4601 unsigned int precision = TYPE_PRECISION (ntype);
4602 hashcode = iterative_hash_object (precision, hashcode);
4603 }
4604 break;
4605 default:
4606 break;
4607 }
4608
4609 ntype = type_hash_canon (hashcode, ntype);
4610
4611 /* If the target-dependent attributes make NTYPE different from
4612 its canonical type, we will need to use structural equality
4613 checks for this type. */
4614 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4615 || !comp_type_attributes (ntype, ttype))
4616 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4617 else if (TYPE_CANONICAL (ntype) == ntype)
4618 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4619
4620 ttype = build_qualified_type (ntype, quals);
4621 }
4622 else if (TYPE_QUALS (ttype) != quals)
4623 ttype = build_qualified_type (ttype, quals);
4624
4625 return ttype;
4626 }
4627
4628 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4629 the same. */
4630
4631 static bool
4632 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4633 {
4634 tree cl1, cl2;
4635 for (cl1 = clauses1, cl2 = clauses2;
4636 cl1 && cl2;
4637 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4638 {
4639 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4640 return false;
4641 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4642 {
4643 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4644 OMP_CLAUSE_DECL (cl2)) != 1)
4645 return false;
4646 }
4647 switch (OMP_CLAUSE_CODE (cl1))
4648 {
4649 case OMP_CLAUSE_ALIGNED:
4650 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4651 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4652 return false;
4653 break;
4654 case OMP_CLAUSE_LINEAR:
4655 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4656 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4657 return false;
4658 break;
4659 case OMP_CLAUSE_SIMDLEN:
4660 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4661 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4662 return false;
4663 default:
4664 break;
4665 }
4666 }
4667 return true;
4668 }
4669
4670 /* Compare two constructor-element-type constants. Return true if the lists
4671 are known to be equal; otherwise return false. */
4672
4673 static bool
4674 simple_cst_list_equal (const_tree l1, const_tree l2)
4675 {
4676 while (l1 != NULL_TREE && l2 != NULL_TREE)
4677 {
4678 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4679 return false;
4680
4681 l1 = TREE_CHAIN (l1);
4682 l2 = TREE_CHAIN (l2);
4683 }
4684
4685 return l1 == l2;
4686 }
4687
4688 /* Compare two attributes for their value identity. Return true if the
4689 attribute values are known to be equal; otherwise return false.
4690 */
4691
4692 static bool
4693 attribute_value_equal (const_tree attr1, const_tree attr2)
4694 {
4695 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4696 return true;
4697
4698 if (TREE_VALUE (attr1) != NULL_TREE
4699 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4700 && TREE_VALUE (attr2) != NULL
4701 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4702 return (simple_cst_list_equal (TREE_VALUE (attr1),
4703 TREE_VALUE (attr2)) == 1);
4704
4705 if ((flag_openmp || flag_openmp_simd)
4706 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4707 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4708 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4709 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4710 TREE_VALUE (attr2));
4711
4712 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4713 }
4714
4715 /* Return 0 if the attributes for two types are incompatible, 1 if they
4716 are compatible, and 2 if they are nearly compatible (which causes a
4717 warning to be generated). */
4718 int
4719 comp_type_attributes (const_tree type1, const_tree type2)
4720 {
4721 const_tree a1 = TYPE_ATTRIBUTES (type1);
4722 const_tree a2 = TYPE_ATTRIBUTES (type2);
4723 const_tree a;
4724
4725 if (a1 == a2)
4726 return 1;
4727 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4728 {
4729 const struct attribute_spec *as;
4730 const_tree attr;
4731
4732 as = lookup_attribute_spec (get_attribute_name (a));
4733 if (!as || as->affects_type_identity == false)
4734 continue;
4735
4736 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4737 if (!attr || !attribute_value_equal (a, attr))
4738 break;
4739 }
4740 if (!a)
4741 {
4742 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4743 {
4744 const struct attribute_spec *as;
4745
4746 as = lookup_attribute_spec (get_attribute_name (a));
4747 if (!as || as->affects_type_identity == false)
4748 continue;
4749
4750 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4751 break;
4752 /* We don't need to compare trees again, as we did this
4753 already in first loop. */
4754 }
4755 /* All attributes that affect type identity are equal, so
4756 there is no need to call the target hook for comparison. */
4757 if (!a)
4758 return 1;
4759 }
4760 /* As some type combinations - like default calling-convention - might
4761 be compatible, we have to call the target hook to get the final result. */
4762 return targetm.comp_type_attributes (type1, type2);
4763 }
4764
4765 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4766 is ATTRIBUTE.
4767
4768 Record such modified types already made so we don't make duplicates. */
4769
4770 tree
4771 build_type_attribute_variant (tree ttype, tree attribute)
4772 {
4773 return build_type_attribute_qual_variant (ttype, attribute,
4774 TYPE_QUALS (ttype));
4775 }
4776
4777
4778 /* Reset the expression *EXPR_P, a size or position.
4779
4780 ??? We could reset all non-constant sizes or positions. But it's cheap
4781 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4782
4783 We need to reset self-referential sizes or positions because they cannot
4784 be gimplified and thus can contain a CALL_EXPR after the gimplification
4785 is finished, which will run afoul of LTO streaming. And they need to be
4786 reset to something essentially dummy but not constant, so as to preserve
4787 the properties of the object they are attached to. */
4788
4789 static inline void
4790 free_lang_data_in_one_sizepos (tree *expr_p)
4791 {
4792 tree expr = *expr_p;
4793 if (CONTAINS_PLACEHOLDER_P (expr))
4794 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4795 }
4796
4797
4798 /* Reset all the fields in a binfo node BINFO. We only keep
4799 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4800
4801 static void
4802 free_lang_data_in_binfo (tree binfo)
4803 {
4804 unsigned i;
4805 tree t;
4806
4807 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4808
4809 BINFO_VIRTUALS (binfo) = NULL_TREE;
4810 BINFO_BASE_ACCESSES (binfo) = NULL;
4811 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4812 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4813
4814 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4815 free_lang_data_in_binfo (t);
4816 }
4817
4818
4819 /* Reset all language specific information still present in TYPE. */
4820
4821 static void
4822 free_lang_data_in_type (tree type)
4823 {
4824 gcc_assert (TYPE_P (type));
4825
4826 /* Give the FE a chance to remove its own data first. */
4827 lang_hooks.free_lang_data (type);
4828
4829 TREE_LANG_FLAG_0 (type) = 0;
4830 TREE_LANG_FLAG_1 (type) = 0;
4831 TREE_LANG_FLAG_2 (type) = 0;
4832 TREE_LANG_FLAG_3 (type) = 0;
4833 TREE_LANG_FLAG_4 (type) = 0;
4834 TREE_LANG_FLAG_5 (type) = 0;
4835 TREE_LANG_FLAG_6 (type) = 0;
4836
4837 if (TREE_CODE (type) == FUNCTION_TYPE)
4838 {
4839 /* Remove the const and volatile qualifiers from arguments. The
4840 C++ front end removes them, but the C front end does not,
4841 leading to false ODR violation errors when merging two
4842 instances of the same function signature compiled by
4843 different front ends. */
4844 tree p;
4845
4846 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4847 {
4848 tree arg_type = TREE_VALUE (p);
4849
4850 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4851 {
4852 int quals = TYPE_QUALS (arg_type)
4853 & ~TYPE_QUAL_CONST
4854 & ~TYPE_QUAL_VOLATILE;
4855 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4856 free_lang_data_in_type (TREE_VALUE (p));
4857 }
4858 }
4859 }
4860
4861 /* Remove members that are not actually FIELD_DECLs from the field
4862 list of an aggregate. These occur in C++. */
4863 if (RECORD_OR_UNION_TYPE_P (type))
4864 {
4865 tree prev, member;
4866
4867 /* Note that TYPE_FIELDS can be shared across distinct
4868 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4869 to be removed, we cannot set its TREE_CHAIN to NULL.
4870 Otherwise, we would not be able to find all the other fields
4871 in the other instances of this TREE_TYPE.
4872
4873 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4874 prev = NULL_TREE;
4875 member = TYPE_FIELDS (type);
4876 while (member)
4877 {
4878 if (TREE_CODE (member) == FIELD_DECL
4879 || TREE_CODE (member) == TYPE_DECL)
4880 {
4881 if (prev)
4882 TREE_CHAIN (prev) = member;
4883 else
4884 TYPE_FIELDS (type) = member;
4885 prev = member;
4886 }
4887
4888 member = TREE_CHAIN (member);
4889 }
4890
4891 if (prev)
4892 TREE_CHAIN (prev) = NULL_TREE;
4893 else
4894 TYPE_FIELDS (type) = NULL_TREE;
4895
4896 TYPE_METHODS (type) = NULL_TREE;
4897 if (TYPE_BINFO (type))
4898 free_lang_data_in_binfo (TYPE_BINFO (type));
4899 }
4900 else
4901 {
4902 /* For non-aggregate types, clear out the language slot (which
4903 overloads TYPE_BINFO). */
4904 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4905
4906 if (INTEGRAL_TYPE_P (type)
4907 || SCALAR_FLOAT_TYPE_P (type)
4908 || FIXED_POINT_TYPE_P (type))
4909 {
4910 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4911 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4912 }
4913 }
4914
4915 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4916 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4917
4918 if (TYPE_CONTEXT (type)
4919 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4920 {
4921 tree ctx = TYPE_CONTEXT (type);
4922 do
4923 {
4924 ctx = BLOCK_SUPERCONTEXT (ctx);
4925 }
4926 while (ctx && TREE_CODE (ctx) == BLOCK);
4927 TYPE_CONTEXT (type) = ctx;
4928 }
4929 }
4930
4931
4932 /* Return true if DECL may need an assembler name to be set. */
4933
4934 static inline bool
4935 need_assembler_name_p (tree decl)
4936 {
4937 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4938 if (TREE_CODE (decl) != FUNCTION_DECL
4939 && TREE_CODE (decl) != VAR_DECL)
4940 return false;
4941
4942 /* If DECL already has its assembler name set, it does not need a
4943 new one. */
4944 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4945 || DECL_ASSEMBLER_NAME_SET_P (decl))
4946 return false;
4947
4948 /* Abstract decls do not need an assembler name. */
4949 if (DECL_ABSTRACT (decl))
4950 return false;
4951
4952 /* For VAR_DECLs, only static, public and external symbols need an
4953 assembler name. */
4954 if (TREE_CODE (decl) == VAR_DECL
4955 && !TREE_STATIC (decl)
4956 && !TREE_PUBLIC (decl)
4957 && !DECL_EXTERNAL (decl))
4958 return false;
4959
4960 if (TREE_CODE (decl) == FUNCTION_DECL)
4961 {
4962 /* Do not set assembler name on builtins. Allow RTL expansion to
4963 decide whether to expand inline or via a regular call. */
4964 if (DECL_BUILT_IN (decl)
4965 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
4966 return false;
4967
4968 /* Functions represented in the callgraph need an assembler name. */
4969 if (cgraph_get_node (decl) != NULL)
4970 return true;
4971
4972 /* Unused and not public functions don't need an assembler name. */
4973 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
4974 return false;
4975 }
4976
4977 return true;
4978 }
4979
4980
4981 /* Reset all language specific information still present in symbol
4982 DECL. */
4983
4984 static void
4985 free_lang_data_in_decl (tree decl)
4986 {
4987 gcc_assert (DECL_P (decl));
4988
4989 /* Give the FE a chance to remove its own data first. */
4990 lang_hooks.free_lang_data (decl);
4991
4992 TREE_LANG_FLAG_0 (decl) = 0;
4993 TREE_LANG_FLAG_1 (decl) = 0;
4994 TREE_LANG_FLAG_2 (decl) = 0;
4995 TREE_LANG_FLAG_3 (decl) = 0;
4996 TREE_LANG_FLAG_4 (decl) = 0;
4997 TREE_LANG_FLAG_5 (decl) = 0;
4998 TREE_LANG_FLAG_6 (decl) = 0;
4999
5000 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5001 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5002 if (TREE_CODE (decl) == FIELD_DECL)
5003 {
5004 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5005 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5006 DECL_QUALIFIER (decl) = NULL_TREE;
5007 }
5008
5009 if (TREE_CODE (decl) == FUNCTION_DECL)
5010 {
5011 struct cgraph_node *node;
5012 if (!(node = cgraph_get_node (decl))
5013 || (!node->definition && !node->clones))
5014 {
5015 if (node)
5016 cgraph_release_function_body (node);
5017 else
5018 {
5019 release_function_body (decl);
5020 DECL_ARGUMENTS (decl) = NULL;
5021 DECL_RESULT (decl) = NULL;
5022 DECL_INITIAL (decl) = error_mark_node;
5023 }
5024 }
5025 if (gimple_has_body_p (decl))
5026 {
5027 tree t;
5028
5029 /* If DECL has a gimple body, then the context for its
5030 arguments must be DECL. Otherwise, it doesn't really
5031 matter, as we will not be emitting any code for DECL. In
5032 general, there may be other instances of DECL created by
5033 the front end and since PARM_DECLs are generally shared,
5034 their DECL_CONTEXT changes as the replicas of DECL are
5035 created. The only time where DECL_CONTEXT is important
5036 is for the FUNCTION_DECLs that have a gimple body (since
5037 the PARM_DECL will be used in the function's body). */
5038 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5039 DECL_CONTEXT (t) = decl;
5040 }
5041
5042 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5043 At this point, it is not needed anymore. */
5044 DECL_SAVED_TREE (decl) = NULL_TREE;
5045
5046 /* Clear the abstract origin if it refers to a method. Otherwise
5047 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5048 origin will not be output correctly. */
5049 if (DECL_ABSTRACT_ORIGIN (decl)
5050 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5051 && RECORD_OR_UNION_TYPE_P
5052 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5053 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5054
5055 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5056 DECL_VINDEX referring to itself into a vtable slot number as it
5057 should. Happens with functions that are copied and then forgotten
5058 about. Just clear it, it won't matter anymore. */
5059 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5060 DECL_VINDEX (decl) = NULL_TREE;
5061 }
5062 else if (TREE_CODE (decl) == VAR_DECL)
5063 {
5064 if ((DECL_EXTERNAL (decl)
5065 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5066 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5067 DECL_INITIAL (decl) = NULL_TREE;
5068 }
5069 else if (TREE_CODE (decl) == TYPE_DECL
5070 || TREE_CODE (decl) == FIELD_DECL)
5071 DECL_INITIAL (decl) = NULL_TREE;
5072 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5073 && DECL_INITIAL (decl)
5074 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5075 {
5076 /* Strip builtins from the translation-unit BLOCK. We still have targets
5077 without builtin_decl_explicit support, and builtins are shared
5078 nodes, so we can't use TREE_CHAIN in multiple lists. */
5079 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5080 while (*nextp)
5081 {
5082 tree var = *nextp;
5083 if (TREE_CODE (var) == FUNCTION_DECL
5084 && DECL_BUILT_IN (var))
5085 *nextp = TREE_CHAIN (var);
5086 else
5087 nextp = &TREE_CHAIN (var);
5088 }
5089 }
5090 }
5091
5092
5093 /* Data used when collecting DECLs and TYPEs for language data removal. */
5094
5095 struct free_lang_data_d
5096 {
5097 /* Worklist to avoid excessive recursion. */
5098 vec<tree> worklist;
5099
5100 /* Set of traversed objects. Used to avoid duplicate visits. */
5101 struct pointer_set_t *pset;
5102
5103 /* Array of symbols to process with free_lang_data_in_decl. */
5104 vec<tree> decls;
5105
5106 /* Array of types to process with free_lang_data_in_type. */
5107 vec<tree> types;
5108 };
5109
5110
5111 /* Save all language fields needed to generate proper debug information
5112 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5113
5114 static void
5115 save_debug_info_for_decl (tree t)
5116 {
5117 /*struct saved_debug_info_d *sdi;*/
5118
5119 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5120
5121 /* FIXME. Partial implementation for saving debug info removed. */
5122 }
5123
5124
5125 /* Save all language fields needed to generate proper debug information
5126 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5127
5128 static void
5129 save_debug_info_for_type (tree t)
5130 {
5131 /*struct saved_debug_info_d *sdi;*/
5132
5133 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5134
5135 /* FIXME. Partial implementation for saving debug info removed. */
5136 }
5137
5138
5139 /* Add type or decl T to one of the list of tree nodes that need their
5140 language data removed. The lists are held inside FLD. */
5141
5142 static void
5143 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5144 {
5145 if (DECL_P (t))
5146 {
5147 fld->decls.safe_push (t);
5148 if (debug_info_level > DINFO_LEVEL_TERSE)
5149 save_debug_info_for_decl (t);
5150 }
5151 else if (TYPE_P (t))
5152 {
5153 fld->types.safe_push (t);
5154 if (debug_info_level > DINFO_LEVEL_TERSE)
5155 save_debug_info_for_type (t);
5156 }
5157 else
5158 gcc_unreachable ();
5159 }
5160
5161 /* Push tree node T into FLD->WORKLIST. */
5162
5163 static inline void
5164 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5165 {
5166 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5167 fld->worklist.safe_push ((t));
5168 }
5169
5170
5171 /* Operand callback helper for find_decls_types. *TP is the
5172 subtree operand being considered. */
5173
5174 static tree
5175 find_decls_types_r (tree *tp, int *ws, void *data)
5176 {
5177 tree t = *tp;
5178 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5179
5180 if (TREE_CODE (t) == TREE_LIST)
5181 return NULL_TREE;
5182
5183 /* Language specific nodes will be removed, so there is no need
5184 to gather anything under them. */
5185 if (is_lang_specific (t))
5186 {
5187 *ws = 0;
5188 return NULL_TREE;
5189 }
5190
5191 if (DECL_P (t))
5192 {
5193 /* Note that walk_tree does not traverse every possible field in
5194 decls, so we have to do our own traversals here. */
5195 add_tree_to_fld_list (t, fld);
5196
5197 fld_worklist_push (DECL_NAME (t), fld);
5198 fld_worklist_push (DECL_CONTEXT (t), fld);
5199 fld_worklist_push (DECL_SIZE (t), fld);
5200 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5201
5202 /* We are going to remove everything under DECL_INITIAL for
5203 TYPE_DECLs. No point walking them. */
5204 if (TREE_CODE (t) != TYPE_DECL)
5205 fld_worklist_push (DECL_INITIAL (t), fld);
5206
5207 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5208 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5209
5210 if (TREE_CODE (t) == FUNCTION_DECL)
5211 {
5212 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5213 fld_worklist_push (DECL_RESULT (t), fld);
5214 }
5215 else if (TREE_CODE (t) == TYPE_DECL)
5216 {
5217 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5218 fld_worklist_push (DECL_VINDEX (t), fld);
5219 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5220 }
5221 else if (TREE_CODE (t) == FIELD_DECL)
5222 {
5223 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5224 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5225 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5226 fld_worklist_push (DECL_FCONTEXT (t), fld);
5227 }
5228 else if (TREE_CODE (t) == VAR_DECL)
5229 {
5230 fld_worklist_push (DECL_SECTION_NAME (t), fld);
5231 fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
5232 }
5233
5234 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5235 && DECL_HAS_VALUE_EXPR_P (t))
5236 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5237
5238 if (TREE_CODE (t) != FIELD_DECL
5239 && TREE_CODE (t) != TYPE_DECL)
5240 fld_worklist_push (TREE_CHAIN (t), fld);
5241 *ws = 0;
5242 }
5243 else if (TYPE_P (t))
5244 {
5245 /* Note that walk_tree does not traverse every possible field in
5246 types, so we have to do our own traversals here. */
5247 add_tree_to_fld_list (t, fld);
5248
5249 if (!RECORD_OR_UNION_TYPE_P (t))
5250 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5251 fld_worklist_push (TYPE_SIZE (t), fld);
5252 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5253 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5254 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5255 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5256 fld_worklist_push (TYPE_NAME (t), fld);
5257 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5258 them and thus do not want to reach unused pointer types
5259 this way. */
5260 if (!POINTER_TYPE_P (t))
5261 fld_worklist_push (TYPE_MINVAL (t), fld);
5262 if (!RECORD_OR_UNION_TYPE_P (t))
5263 fld_worklist_push (TYPE_MAXVAL (t), fld);
5264 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5265 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5266 do not want to reach unused variants this way. */
5267 if (TYPE_CONTEXT (t))
5268 {
5269 tree ctx = TYPE_CONTEXT (t);
5270 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5271 So push that instead. */
5272 while (ctx && TREE_CODE (ctx) == BLOCK)
5273 ctx = BLOCK_SUPERCONTEXT (ctx);
5274 fld_worklist_push (ctx, fld);
5275 }
5276 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5277 want to reach unused types this way. */
5278
5279 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5280 {
5281 unsigned i;
5282 tree tem;
5283 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5284 fld_worklist_push (TREE_TYPE (tem), fld);
5285 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5286 if (tem
5287 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5288 && TREE_CODE (tem) == TREE_LIST)
5289 do
5290 {
5291 fld_worklist_push (TREE_VALUE (tem), fld);
5292 tem = TREE_CHAIN (tem);
5293 }
5294 while (tem);
5295 }
5296 if (RECORD_OR_UNION_TYPE_P (t))
5297 {
5298 tree tem;
5299 /* Push all TYPE_FIELDS - interesting and uninteresting entries
5300 can be interleaved. */
5301 tem = TYPE_FIELDS (t);
5302 while (tem)
5303 {
5304 if (TREE_CODE (tem) == FIELD_DECL
5305 || TREE_CODE (tem) == TYPE_DECL)
5306 fld_worklist_push (tem, fld);
5307 tem = TREE_CHAIN (tem);
5308 }
5309 }
5310
5311 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5312 *ws = 0;
5313 }
5314 else if (TREE_CODE (t) == BLOCK)
5315 {
5316 tree tem;
5317 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5318 fld_worklist_push (tem, fld);
5319 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5320 fld_worklist_push (tem, fld);
5321 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5322 }
5323
5324 if (TREE_CODE (t) != IDENTIFIER_NODE
5325 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5326 fld_worklist_push (TREE_TYPE (t), fld);
5327
5328 return NULL_TREE;
5329 }
5330
5331
5332 /* Find decls and types in T. */
5333
5334 static void
5335 find_decls_types (tree t, struct free_lang_data_d *fld)
5336 {
5337 while (1)
5338 {
5339 if (!pointer_set_contains (fld->pset, t))
5340 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5341 if (fld->worklist.is_empty ())
5342 break;
5343 t = fld->worklist.pop ();
5344 }
5345 }
5346
5347 /* Translate all the types in LIST with the corresponding runtime
5348 types. */
5349
5350 static tree
5351 get_eh_types_for_runtime (tree list)
5352 {
5353 tree head, prev;
5354
5355 if (list == NULL_TREE)
5356 return NULL_TREE;
5357
5358 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5359 prev = head;
5360 list = TREE_CHAIN (list);
5361 while (list)
5362 {
5363 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5364 TREE_CHAIN (prev) = n;
5365 prev = TREE_CHAIN (prev);
5366 list = TREE_CHAIN (list);
5367 }
5368
5369 return head;
5370 }
5371
5372
5373 /* Find decls and types referenced in EH region R and store them in
5374 FLD->DECLS and FLD->TYPES. */
5375
5376 static void
5377 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5378 {
5379 switch (r->type)
5380 {
5381 case ERT_CLEANUP:
5382 break;
5383
5384 case ERT_TRY:
5385 {
5386 eh_catch c;
5387
5388 /* The types referenced in each catch must first be changed to the
5389 EH types used at runtime. This removes references to FE types
5390 in the region. */
5391 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5392 {
5393 c->type_list = get_eh_types_for_runtime (c->type_list);
5394 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5395 }
5396 }
5397 break;
5398
5399 case ERT_ALLOWED_EXCEPTIONS:
5400 r->u.allowed.type_list
5401 = get_eh_types_for_runtime (r->u.allowed.type_list);
5402 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5403 break;
5404
5405 case ERT_MUST_NOT_THROW:
5406 walk_tree (&r->u.must_not_throw.failure_decl,
5407 find_decls_types_r, fld, fld->pset);
5408 break;
5409 }
5410 }
5411
5412
5413 /* Find decls and types referenced in cgraph node N and store them in
5414 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5415 look for *every* kind of DECL and TYPE node reachable from N,
5416 including those embedded inside types and decls (i.e., TYPE_DECLs,
5417 NAMESPACE_DECLs, etc). */
5418
5419 static void
5420 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5421 {
5422 basic_block bb;
5423 struct function *fn;
5424 unsigned ix;
5425 tree t;
5426
5427 find_decls_types (n->decl, fld);
5428
5429 if (!gimple_has_body_p (n->decl))
5430 return;
5431
5432 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5433
5434 fn = DECL_STRUCT_FUNCTION (n->decl);
5435
5436 /* Traverse locals. */
5437 FOR_EACH_LOCAL_DECL (fn, ix, t)
5438 find_decls_types (t, fld);
5439
5440 /* Traverse EH regions in FN. */
5441 {
5442 eh_region r;
5443 FOR_ALL_EH_REGION_FN (r, fn)
5444 find_decls_types_in_eh_region (r, fld);
5445 }
5446
5447 /* Traverse every statement in FN. */
5448 FOR_EACH_BB_FN (bb, fn)
5449 {
5450 gimple_stmt_iterator si;
5451 unsigned i;
5452
5453 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5454 {
5455 gimple phi = gsi_stmt (si);
5456
5457 for (i = 0; i < gimple_phi_num_args (phi); i++)
5458 {
5459 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5460 find_decls_types (*arg_p, fld);
5461 }
5462 }
5463
5464 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5465 {
5466 gimple stmt = gsi_stmt (si);
5467
5468 if (is_gimple_call (stmt))
5469 find_decls_types (gimple_call_fntype (stmt), fld);
5470
5471 for (i = 0; i < gimple_num_ops (stmt); i++)
5472 {
5473 tree arg = gimple_op (stmt, i);
5474 find_decls_types (arg, fld);
5475 }
5476 }
5477 }
5478 }
5479
5480
5481 /* Find decls and types referenced in varpool node N and store them in
5482 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5483 look for *every* kind of DECL and TYPE node reachable from N,
5484 including those embedded inside types and decls (i.e., TYPE_DECLs,
5485 NAMESPACE_DECLs, etc). */
5486
5487 static void
5488 find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
5489 {
5490 find_decls_types (v->decl, fld);
5491 }
5492
5493 /* If T needs an assembler name, have one created for it. */
5494
5495 void
5496 assign_assembler_name_if_neeeded (tree t)
5497 {
5498 if (need_assembler_name_p (t))
5499 {
5500 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5501 diagnostics that use input_location to show locus
5502 information. The problem here is that, at this point,
5503 input_location is generally anchored to the end of the file
5504 (since the parser is long gone), so we don't have a good
5505 position to pin it to.
5506
5507 To alleviate this problem, this uses the location of T's
5508 declaration. Examples of this are
5509 testsuite/g++.dg/template/cond2.C and
5510 testsuite/g++.dg/template/pr35240.C. */
5511 location_t saved_location = input_location;
5512 input_location = DECL_SOURCE_LOCATION (t);
5513
5514 decl_assembler_name (t);
5515
5516 input_location = saved_location;
5517 }
5518 }
5519
5520
5521 /* Free language specific information for every operand and expression
5522 in every node of the call graph. This process operates in three stages:
5523
5524 1- Every callgraph node and varpool node is traversed looking for
5525 decls and types embedded in them. This is a more exhaustive
5526 search than that done by find_referenced_vars, because it will
5527 also collect individual fields, decls embedded in types, etc.
5528
5529 2- All the decls found are sent to free_lang_data_in_decl.
5530
5531 3- All the types found are sent to free_lang_data_in_type.
5532
5533 The ordering between decls and types is important because
5534 free_lang_data_in_decl sets assembler names, which includes
5535 mangling. So types cannot be freed up until assembler names have
5536 been set up. */
5537
5538 static void
5539 free_lang_data_in_cgraph (void)
5540 {
5541 struct cgraph_node *n;
5542 struct varpool_node *v;
5543 struct free_lang_data_d fld;
5544 tree t;
5545 unsigned i;
5546 alias_pair *p;
5547
5548 /* Initialize sets and arrays to store referenced decls and types. */
5549 fld.pset = pointer_set_create ();
5550 fld.worklist.create (0);
5551 fld.decls.create (100);
5552 fld.types.create (100);
5553
5554 /* Find decls and types in the body of every function in the callgraph. */
5555 FOR_EACH_FUNCTION (n)
5556 find_decls_types_in_node (n, &fld);
5557
5558 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5559 find_decls_types (p->decl, &fld);
5560
5561 /* Find decls and types in every varpool symbol. */
5562 FOR_EACH_VARIABLE (v)
5563 find_decls_types_in_var (v, &fld);
5564
5565 /* Set the assembler name on every decl found. We need to do this
5566 now because free_lang_data_in_decl will invalidate data needed
5567 for mangling. This breaks mangling on interdependent decls. */
5568 FOR_EACH_VEC_ELT (fld.decls, i, t)
5569 assign_assembler_name_if_neeeded (t);
5570
5571 /* Traverse every decl found freeing its language data. */
5572 FOR_EACH_VEC_ELT (fld.decls, i, t)
5573 free_lang_data_in_decl (t);
5574
5575 /* Traverse every type found freeing its language data. */
5576 FOR_EACH_VEC_ELT (fld.types, i, t)
5577 free_lang_data_in_type (t);
5578
5579 pointer_set_destroy (fld.pset);
5580 fld.worklist.release ();
5581 fld.decls.release ();
5582 fld.types.release ();
5583 }
5584
5585
5586 /* Free resources that are used by the front end but are not needed once it is done. */
5587
5588 static unsigned
5589 free_lang_data (void)
5590 {
5591 unsigned i;
5592
5593 /* If we are the LTO frontend we have freed lang-specific data already. */
5594 if (in_lto_p
5595 || !flag_generate_lto)
5596 return 0;
5597
5598 /* Allocate and assign alias sets to the standard integer types
5599 while the slots are still in the way the frontends generated them. */
5600 for (i = 0; i < itk_none; ++i)
5601 if (integer_types[i])
5602 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5603
5604 /* Traverse the IL resetting language specific information for
5605 operands, expressions, etc. */
5606 free_lang_data_in_cgraph ();
5607
5608 /* Create gimple variants for common types. */
5609 ptrdiff_type_node = integer_type_node;
5610 fileptr_type_node = ptr_type_node;
5611
5612 /* Reset some langhooks. Do not reset types_compatible_p, it may
5613 still be used indirectly via the get_alias_set langhook. */
5614 lang_hooks.dwarf_name = lhd_dwarf_name;
5615 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5616 /* We do not want the default decl_assembler_name implementation;
5617 rather, once everything is fixed up, we want a wrapper around it
5618 that asserts all non-local symbols already got their assembler
5619 name and that only produces assembler names for local symbols.
5620 Or, better, make sure we never call decl_assembler_name on local
5621 symbols and devise a separate, middle-end private scheme for it. */
5622
5623 /* Reset diagnostic machinery. */
5624 tree_diagnostics_defaults (global_dc);
5625
5626 return 0;
5627 }
5628
5629
5630 namespace {
5631
5632 const pass_data pass_data_ipa_free_lang_data =
5633 {
5634 SIMPLE_IPA_PASS, /* type */
5635 "*free_lang_data", /* name */
5636 OPTGROUP_NONE, /* optinfo_flags */
5637 false, /* has_gate */
5638 true, /* has_execute */
5639 TV_IPA_FREE_LANG_DATA, /* tv_id */
5640 0, /* properties_required */
5641 0, /* properties_provided */
5642 0, /* properties_destroyed */
5643 0, /* todo_flags_start */
5644 0, /* todo_flags_finish */
5645 };
5646
5647 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5648 {
5649 public:
5650 pass_ipa_free_lang_data (gcc::context *ctxt)
5651 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5652 {}
5653
5654 /* opt_pass methods: */
5655 unsigned int execute () { return free_lang_data (); }
5656
5657 }; // class pass_ipa_free_lang_data
5658
5659 } // anon namespace
5660
5661 simple_ipa_opt_pass *
5662 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5663 {
5664 return new pass_ipa_free_lang_data (ctxt);
5665 }
5666
5667 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5668 ATTR_NAME. Also used internally by remove_attribute(). */
5669 bool
5670 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5671 {
5672 size_t ident_len = IDENTIFIER_LENGTH (ident);
5673
5674 if (ident_len == attr_len)
5675 {
5676 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5677 return true;
5678 }
5679 else if (ident_len == attr_len + 4)
5680 {
5681 /* There is the possibility that ATTR is 'text' and IDENT is
5682 '__text__'. */
5683 const char *p = IDENTIFIER_POINTER (ident);
5684 if (p[0] == '_' && p[1] == '_'
5685 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5686 && strncmp (attr_name, p + 2, attr_len) == 0)
5687 return true;
5688 }
5689
5690 return false;
5691 }
5692
5693 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5694 of ATTR_NAME, and LIST is not NULL_TREE. */
5695 tree
5696 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5697 {
5698 while (list)
5699 {
5700 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5701
5702 if (ident_len == attr_len)
5703 {
5704 if (!strcmp (attr_name,
5705 IDENTIFIER_POINTER (get_attribute_name (list))))
5706 break;
5707 }
5708 /* TODO: If we made sure that attributes were stored in the
5709 canonical form without '__...__' (i.e., as in 'text' as opposed
5710 to '__text__') then we could avoid the following case. */
5711 else if (ident_len == attr_len + 4)
5712 {
5713 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5714 if (p[0] == '_' && p[1] == '_'
5715 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5716 && strncmp (attr_name, p + 2, attr_len) == 0)
5717 break;
5718 }
5719 list = TREE_CHAIN (list);
5720 }
5721
5722 return list;
5723 }
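
/* For example (an illustrative sketch; TYPE stands for some tree type),
   the lookup_attribute wrapper around the function above matches both
   spellings of an attribute name:

     tree a = lookup_attribute ("packed", TYPE_ATTRIBUTES (type));

   finds a list entry named either 'packed' or '__packed__', returning
   the TREE_LIST node of the first occurrence or NULL_TREE.  */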
5724
5725 /* A variant of lookup_attribute() that can be used with an identifier
5726 as the first argument, and where the identifier can be either
5727 'text' or '__text__'.
5728
5729 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5730 return a pointer to the attribute's list element if the attribute
5731 is part of the list, or NULL_TREE if not found. If the attribute
5732 appears more than once, this only returns the first occurrence; the
5733 TREE_CHAIN of the return value should be passed back in if further
5734 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5735 can be in the form 'text' or '__text__'. */
5736 static tree
5737 lookup_ident_attribute (tree attr_identifier, tree list)
5738 {
5739 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5740
5741 while (list)
5742 {
5743 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5744 == IDENTIFIER_NODE);
5745
5746 /* Identifiers can be compared directly for equality. */
5747 if (attr_identifier == get_attribute_name (list))
5748 break;
5749
5750 /* If they are not equal, they may still be one in the form
5751 'text' while the other one is in the form '__text__'. TODO:
5752 If we were storing attributes in normalized 'text' form, then
5753 this could all go away and we could take full advantage of
5754 the fact that we're comparing identifiers. :-) */
5755 {
5756 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5757 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5758
5759 if (ident_len == attr_len + 4)
5760 {
5761 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5762 const char *q = IDENTIFIER_POINTER (attr_identifier);
5763 if (p[0] == '_' && p[1] == '_'
5764 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5765 && strncmp (q, p + 2, attr_len) == 0)
5766 break;
5767 }
5768 else if (ident_len + 4 == attr_len)
5769 {
5770 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5771 const char *q = IDENTIFIER_POINTER (attr_identifier);
5772 if (q[0] == '_' && q[1] == '_'
5773 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5774 && strncmp (q + 2, p, ident_len) == 0)
5775 break;
5776 }
5777 }
5778 list = TREE_CHAIN (list);
5779 }
5780
5781 return list;
5782 }
5783
5784 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5785 modified list. */
5786
5787 tree
5788 remove_attribute (const char *attr_name, tree list)
5789 {
5790 tree *p;
5791 size_t attr_len = strlen (attr_name);
5792
5793 gcc_checking_assert (attr_name[0] != '_');
5794
5795 for (p = &list; *p; )
5796 {
5797 tree l = *p;
5798 /* TODO: If we were storing attributes in normalized form, here
5799 we could use a simple strcmp(). */
5800 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5801 *p = TREE_CHAIN (l);
5802 else
5803 p = &TREE_CHAIN (l);
5804 }
5805
5806 return list;
5807 }
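
/* For example (an illustrative sketch; DECL stands for some declaration),
   a 'deprecated' attribute can be dropped from a declaration's list with

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   ATTR_NAME must be given in the canonical spelling without leading and
   trailing underscores, as asserted above.  */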
5808
5809 /* Return an attribute list that is the union of a1 and a2. */
5810
5811 tree
5812 merge_attributes (tree a1, tree a2)
5813 {
5814 tree attributes;
5815
5816 /* Either one unset? Take the set one. */
5817
5818 if ((attributes = a1) == 0)
5819 attributes = a2;
5820
5821 /* One that completely contains the other? Take it. */
5822
5823 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5824 {
5825 if (attribute_list_contained (a2, a1))
5826 attributes = a2;
5827 else
5828 {
5829 /* Pick the longer list, and copy over the other list's missing entries. */
5830
5831 if (list_length (a1) < list_length (a2))
5832 attributes = a2, a2 = a1;
5833
5834 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5835 {
5836 tree a;
5837 for (a = lookup_ident_attribute (get_attribute_name (a2),
5838 attributes);
5839 a != NULL_TREE && !attribute_value_equal (a, a2);
5840 a = lookup_ident_attribute (get_attribute_name (a2),
5841 TREE_CHAIN (a)))
5842 ;
5843 if (a == NULL_TREE)
5844 {
5845 a1 = copy_node (a2);
5846 TREE_CHAIN (a1) = attributes;
5847 attributes = a1;
5848 }
5849 }
5850 }
5851 }
5852 return attributes;
5853 }
5854
5855 /* Given types T1 and T2, merge their attributes and return
5856 the result. */
5857
5858 tree
5859 merge_type_attributes (tree t1, tree t2)
5860 {
5861 return merge_attributes (TYPE_ATTRIBUTES (t1),
5862 TYPE_ATTRIBUTES (t2));
5863 }
5864
5865 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5866 the result. */
5867
5868 tree
5869 merge_decl_attributes (tree olddecl, tree newdecl)
5870 {
5871 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5872 DECL_ATTRIBUTES (newdecl));
5873 }
5874
5875 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5876
5877 /* Specialization of merge_decl_attributes for various Windows targets.
5878
5879 This handles the following situation:
5880
5881 __declspec (dllimport) int foo;
5882 int foo;
5883
5884 The second instance of `foo' nullifies the dllimport. */
5885
5886 tree
5887 merge_dllimport_decl_attributes (tree old, tree new_tree)
5888 {
5889 tree a;
5890 int delete_dllimport_p = 1;
5891
5892 /* What we need to do here is remove dllimport from `old' if it doesn't
5893 appear in `new'. dllimport behaves like extern: if a declaration is
5894 marked dllimport and a definition appears later, then the object
5895 is not dllimport'd. We also remove a `new' dllimport if the old list
5896 contains dllexport: dllexport always overrides dllimport, regardless
5897 of the order of declaration. */
5898 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5899 delete_dllimport_p = 0;
5900 else if (DECL_DLLIMPORT_P (new_tree)
5901 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5902 {
5903 DECL_DLLIMPORT_P (new_tree) = 0;
5904 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5905 "dllimport ignored", new_tree);
5906 }
5907 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5908 {
5909 /* Warn about overriding a symbol that has already been used, e.g.:
5910 extern int __attribute__ ((dllimport)) foo;
5911 int* bar () {return &foo;}
5912 int foo;
5913 */
5914 if (TREE_USED (old))
5915 {
5916 warning (0, "%q+D redeclared without dllimport attribute "
5917 "after being referenced with dll linkage", new_tree);
5918 /* If we have used a variable's address with dllimport linkage,
5919 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5920 decl may already have had TREE_CONSTANT computed.
5921 We still remove the attribute so that assembler code refers
5922 to '&foo' rather than '_imp__foo'. */
5923 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5924 DECL_DLLIMPORT_P (new_tree) = 1;
5925 }
5926
5927 /* Let an inline definition silently override the external reference,
5928 but otherwise warn about attribute inconsistency. */
5929 else if (TREE_CODE (new_tree) == VAR_DECL
5930 || !DECL_DECLARED_INLINE_P (new_tree))
5931 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
5932 "previous dllimport ignored", new_tree);
5933 }
5934 else
5935 delete_dllimport_p = 0;
5936
5937 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
5938
5939 if (delete_dllimport_p)
5940 a = remove_attribute ("dllimport", a);
5941
5942 return a;
5943 }
5944
5945 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
5946 struct attribute_spec.handler. */
5947
5948 tree
5949 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
5950 bool *no_add_attrs)
5951 {
5952 tree node = *pnode;
5953 bool is_dllimport;
5954
5955 /* These attributes may apply to structure and union types being created,
5956 but otherwise should pass to the declaration involved. */
5957 if (!DECL_P (node))
5958 {
5959 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
5960 | (int) ATTR_FLAG_ARRAY_NEXT))
5961 {
5962 *no_add_attrs = true;
5963 return tree_cons (name, args, NULL_TREE);
5964 }
5965 if (TREE_CODE (node) == RECORD_TYPE
5966 || TREE_CODE (node) == UNION_TYPE)
5967 {
5968 node = TYPE_NAME (node);
5969 if (!node)
5970 return NULL_TREE;
5971 }
5972 else
5973 {
5974 warning (OPT_Wattributes, "%qE attribute ignored",
5975 name);
5976 *no_add_attrs = true;
5977 return NULL_TREE;
5978 }
5979 }
5980
5981 if (TREE_CODE (node) != FUNCTION_DECL
5982 && TREE_CODE (node) != VAR_DECL
5983 && TREE_CODE (node) != TYPE_DECL)
5984 {
5985 *no_add_attrs = true;
5986 warning (OPT_Wattributes, "%qE attribute ignored",
5987 name);
5988 return NULL_TREE;
5989 }
5990
5991 if (TREE_CODE (node) == TYPE_DECL
5992 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
5993 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
5994 {
5995 *no_add_attrs = true;
5996 warning (OPT_Wattributes, "%qE attribute ignored",
5997 name);
5998 return NULL_TREE;
5999 }
6000
6001 is_dllimport = is_attribute_p ("dllimport", name);
6002
6003 /* Report errors on dllimport ambiguities seen now, before they cause
6004 any damage. */
6005 if (is_dllimport)
6006 {
6007 /* Honor any target-specific overrides. */
6008 if (!targetm.valid_dllimport_attribute_p (node))
6009 *no_add_attrs = true;
6010
6011 else if (TREE_CODE (node) == FUNCTION_DECL
6012 && DECL_DECLARED_INLINE_P (node))
6013 {
6014 warning (OPT_Wattributes, "inline function %q+D declared as "
6015 " dllimport: attribute ignored", node);
6016 *no_add_attrs = true;
6017 }
6018 /* Like MS, treat a definition of a dllimport'd variable or of a
6019 non-inlined dllimport'd function as an error. */
6020 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6021 {
6022 error ("function %q+D definition is marked dllimport", node);
6023 *no_add_attrs = true;
6024 }
6025
6026 else if (TREE_CODE (node) == VAR_DECL)
6027 {
6028 if (DECL_INITIAL (node))
6029 {
6030 error ("variable %q+D definition is marked dllimport",
6031 node);
6032 *no_add_attrs = true;
6033 }
6034
6035 /* `extern' needn't be specified with dllimport.
6036 Specify `extern' now and hope for the best. Sigh. */
6037 DECL_EXTERNAL (node) = 1;
6038 /* Also, implicitly give global scope to dllimport'd variables
6039 declared within a function, unless they are declared static. */
6040 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6041 TREE_PUBLIC (node) = 1;
6042 }
6043
6044 if (*no_add_attrs == false)
6045 DECL_DLLIMPORT_P (node) = 1;
6046 }
6047 else if (TREE_CODE (node) == FUNCTION_DECL
6048 && DECL_DECLARED_INLINE_P (node)
6049 && flag_keep_inline_dllexport)
6050 /* An exported function, even if inline, must be emitted. */
6051 DECL_EXTERNAL (node) = 0;
6052
6053 /* Report error if symbol is not accessible at global scope. */
6054 if (!TREE_PUBLIC (node)
6055 && (TREE_CODE (node) == VAR_DECL
6056 || TREE_CODE (node) == FUNCTION_DECL))
6057 {
6058 error ("external linkage required for symbol %q+D because of "
6059 "%qE attribute", node, name);
6060 *no_add_attrs = true;
6061 }
6062
6063 /* A dllexport'd entity must have default visibility so that other
6064 program units (shared libraries or the main executable) can see
6065 it. A dllimport'd entity must have default visibility so that
6066 the linker knows that undefined references within this program
6067 unit can be resolved by the dynamic linker. */
6068 if (!*no_add_attrs)
6069 {
6070 if (DECL_VISIBILITY_SPECIFIED (node)
6071 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6072 error ("%qE implies default visibility, but %qD has already "
6073 "been declared with a different visibility",
6074 name, node);
6075 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6076 DECL_VISIBILITY_SPECIFIED (node) = 1;
6077 }
6078
6079 return NULL_TREE;
6080 }
6081
6082 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6083 \f
6084 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6085 of the various TYPE_QUAL values. */
6086
6087 static void
6088 set_type_quals (tree type, int type_quals)
6089 {
6090 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6091 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6092 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6093 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6094 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6095 }
6096
6097 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6098
6099 bool
6100 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6101 {
6102 return (TYPE_QUALS (cand) == type_quals
6103 && TYPE_NAME (cand) == TYPE_NAME (base)
6104 /* Apparently this is needed for Objective-C. */
6105 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6106 /* Check alignment. */
6107 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6108 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6109 TYPE_ATTRIBUTES (base)));
6110 }
6111
6112 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6113
6114 static bool
6115 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6116 {
6117 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6118 && TYPE_NAME (cand) == TYPE_NAME (base)
6119 /* Apparently this is needed for Objective-C. */
6120 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6121 /* Check alignment. */
6122 && TYPE_ALIGN (cand) == align
6123 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6124 TYPE_ATTRIBUTES (base)));
6125 }
6126
6127 /* This function checks to see if TYPE matches the size of one of the
6128 built-in atomic types, and returns that core atomic type. */
6129
6130 static tree
6131 find_atomic_core_type (tree type)
6132 {
6133 tree base_atomic_type;
6134
6135 /* Only handle complete types. */
6136 if (TYPE_SIZE (type) == NULL_TREE)
6137 return NULL_TREE;
6138
6139 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6140 switch (type_size)
6141 {
6142 case 8:
6143 base_atomic_type = atomicQI_type_node;
6144 break;
6145
6146 case 16:
6147 base_atomic_type = atomicHI_type_node;
6148 break;
6149
6150 case 32:
6151 base_atomic_type = atomicSI_type_node;
6152 break;
6153
6154 case 64:
6155 base_atomic_type = atomicDI_type_node;
6156 break;
6157
6158 case 128:
6159 base_atomic_type = atomicTI_type_node;
6160 break;
6161
6162 default:
6163 base_atomic_type = NULL_TREE;
6164 }
6165
6166 return base_atomic_type;
6167 }
6168
6169 /* Return a version of the TYPE, qualified as indicated by the
6170 TYPE_QUALS, if one exists. If no qualified version exists yet,
6171 return NULL_TREE. */
6172
6173 tree
6174 get_qualified_type (tree type, int type_quals)
6175 {
6176 tree t;
6177
6178 if (TYPE_QUALS (type) == type_quals)
6179 return type;
6180
6181 /* Search the chain of variants to see if there is already one there just
6182 like the one we need to have. If so, use that existing one. We must
6183 preserve the TYPE_NAME, since there is code that depends on this. */
6184 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6185 if (check_qualified_type (t, type, type_quals))
6186 return t;
6187
6188 return NULL_TREE;
6189 }
6190
6191 /* Like get_qualified_type, but creates the type if it does not
6192 exist. This function never returns NULL_TREE. */
6193
6194 tree
6195 build_qualified_type (tree type, int type_quals)
6196 {
6197 tree t;
6198
6199 /* See if we already have the appropriate qualified variant. */
6200 t = get_qualified_type (type, type_quals);
6201
6202 /* If not, build it. */
6203 if (!t)
6204 {
6205 t = build_variant_type_copy (type);
6206 set_type_quals (t, type_quals);
6207
6208 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6209 {
6210 /* See if this object can map to a basic atomic type. */
6211 tree atomic_type = find_atomic_core_type (type);
6212 if (atomic_type)
6213 {
6214 /* Ensure the alignment of this type is compatible with
6215 the required alignment of the atomic type. */
6216 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6217 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6218 }
6219 }
6220
6221 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6222 /* Propagate structural equality. */
6223 SET_TYPE_STRUCTURAL_EQUALITY (t);
6224 else if (TYPE_CANONICAL (type) != type)
6225 /* Build the underlying canonical type, since it is different
6226 from TYPE. */
6227 TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
6228 type_quals);
6229 else
6230 /* T is its own canonical type. */
6231 TYPE_CANONICAL (t) = t;
6232
6233 }
6234
6235 return t;
6236 }
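
/* A usage sketch, illustrative only: requesting a const-volatile variant
   of a standard type node.

     tree cv_int
       = build_qualified_type (integer_type_node,
                               TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   A second call with the same arguments returns the same node, because
   get_qualified_type finds the existing variant on the main variant's
   TYPE_NEXT_VARIANT chain.  */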
6237
6238 /* Create a variant of TYPE with alignment ALIGN. */
6239
6240 tree
6241 build_aligned_type (tree type, unsigned int align)
6242 {
6243 tree t;
6244
6245 if (TYPE_PACKED (type)
6246 || TYPE_ALIGN (type) == align)
6247 return type;
6248
6249 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6250 if (check_aligned_type (t, type, align))
6251 return t;
6252
6253 t = build_variant_type_copy (type);
6254 TYPE_ALIGN (t) = align;
6255
6256 return t;
6257 }
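
/* A usage sketch, illustrative only.  TYPE_ALIGN is measured in bits,
   so a character type aligned to a 16-byte boundary would be built as

     tree aligned_char = build_aligned_type (char_type_node, 128);

   Packed types are returned unchanged, as the early exit above shows.  */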
6258
6259 /* Create a new distinct copy of TYPE. The new type is made its own
6260 MAIN_VARIANT. If TYPE requires structural equality checks, the
6261 resulting type requires structural equality checks; otherwise, its
6262 TYPE_CANONICAL points to itself. */
6263
6264 tree
6265 build_distinct_type_copy (tree type)
6266 {
6267 tree t = copy_node (type);
6268
6269 TYPE_POINTER_TO (t) = 0;
6270 TYPE_REFERENCE_TO (t) = 0;
6271
6272 /* Set the canonical type either to a new equivalence class, or
6273 propagate the need for structural equality checks. */
6274 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6275 SET_TYPE_STRUCTURAL_EQUALITY (t);
6276 else
6277 TYPE_CANONICAL (t) = t;
6278
6279 /* Make it its own variant. */
6280 TYPE_MAIN_VARIANT (t) = t;
6281 TYPE_NEXT_VARIANT (t) = 0;
6282
6283 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6284 whose TREE_TYPE is not t. This can also happen in the Ada
6285 frontend when using subtypes. */
6286
6287 return t;
6288 }
6289
6290 /* Create a new variant of TYPE, equivalent but distinct. This is so
6291 the caller can modify it. TYPE_CANONICAL for the return type will
6292 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6293 are considered equal by the language itself (or that both types
6294 require structural equality checks). */
6295
6296 tree
6297 build_variant_type_copy (tree type)
6298 {
6299 tree t, m = TYPE_MAIN_VARIANT (type);
6300
6301 t = build_distinct_type_copy (type);
6302
6303 /* Since we're building a variant, assume that it is a non-semantic
6304 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6305 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6306
6307 /* Add the new type to the chain of variants of TYPE. */
6308 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6309 TYPE_NEXT_VARIANT (m) = t;
6310 TYPE_MAIN_VARIANT (t) = m;
6311
6312 return t;
6313 }
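
/* An illustrative contrast of the two copy functions above (TYPE is an
   arbitrary existing type; not from the original source):

     tree distinct = build_distinct_type_copy (type);
     tree variant  = build_variant_type_copy (type);

   DISTINCT becomes its own main variant and (absent structural
   equality) its own canonical type, whereas VARIANT stays on TYPE's
   variant chain and shares TYPE's TYPE_CANONICAL, so the language
   still treats it as the same type.  */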
6314 \f
6315 /* Return true if the "from" trees in both tree map bases are equal. */
6316
6317 int
6318 tree_map_base_eq (const void *va, const void *vb)
6319 {
6320 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6321 *const b = (const struct tree_map_base *) vb;
6322 return (a->from == b->from);
6323 }
6324
6325 /* Hash the "from" tree of a tree_map_base. */
6326
6327 unsigned int
6328 tree_map_base_hash (const void *item)
6329 {
6330 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6331 }
6332
6333 /* Return true if this tree map structure is marked for garbage collection
6334 purposes. We simply return true if the from tree is marked, so that this
6335 structure goes away when the from tree goes away. */
6336
6337 int
6338 tree_map_base_marked_p (const void *p)
6339 {
6340 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6341 }
6342
6343 /* Hash a from tree in a tree_map. */
6344
6345 unsigned int
6346 tree_map_hash (const void *item)
6347 {
6348 return (((const struct tree_map *) item)->hash);
6349 }
6350
6351 /* Hash a from tree in a tree_decl_map. */
6352
6353 unsigned int
6354 tree_decl_map_hash (const void *item)
6355 {
6356 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6357 }
6358
6359 /* Return the initialization priority for DECL. */
6360
6361 priority_type
6362 decl_init_priority_lookup (tree decl)
6363 {
6364 struct tree_priority_map *h;
6365 struct tree_map_base in;
6366
6367 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6368 in.from = decl;
6369 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6370 return h ? h->init : DEFAULT_INIT_PRIORITY;
6371 }
6372
6373 /* Return the finalization priority for DECL. */
6374
6375 priority_type
6376 decl_fini_priority_lookup (tree decl)
6377 {
6378 struct tree_priority_map *h;
6379 struct tree_map_base in;
6380
6381 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6382 in.from = decl;
6383 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6384 return h ? h->fini : DEFAULT_INIT_PRIORITY;
6385 }
6386
6387 /* Return the initialization and finalization priority information for
6388 DECL. If there is no previous priority information, a freshly
6389 allocated structure is returned. */
6390
6391 static struct tree_priority_map *
6392 decl_priority_info (tree decl)
6393 {
6394 struct tree_priority_map in;
6395 struct tree_priority_map *h;
6396 void **loc;
6397
6398 in.base.from = decl;
6399 loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
6400 h = (struct tree_priority_map *) *loc;
6401 if (!h)
6402 {
6403 h = ggc_alloc_cleared_tree_priority_map ();
6404 *loc = h;
6405 h->base.from = decl;
6406 h->init = DEFAULT_INIT_PRIORITY;
6407 h->fini = DEFAULT_INIT_PRIORITY;
6408 }
6409
6410 return h;
6411 }
6412
6413 /* Set the initialization priority for DECL to PRIORITY. */
6414
6415 void
6416 decl_init_priority_insert (tree decl, priority_type priority)
6417 {
6418 struct tree_priority_map *h;
6419
6420 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6421 if (priority == DEFAULT_INIT_PRIORITY)
6422 return;
6423 h = decl_priority_info (decl);
6424 h->init = priority;
6425 }
6426
6427 /* Set the finalization priority for DECL to PRIORITY. */
6428
6429 void
6430 decl_fini_priority_insert (tree decl, priority_type priority)
6431 {
6432 struct tree_priority_map *h;
6433
6434 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6435 if (priority == DEFAULT_INIT_PRIORITY)
6436 return;
6437 h = decl_priority_info (decl);
6438 h->fini = priority;
6439 }
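
/* A usage sketch, illustrative only: recording and querying an
   initialization priority for some FUNCTION_DECL FNDECL, e.g. while a
   front end processes __attribute__ ((constructor (200))).

     decl_init_priority_insert (fndecl, 200);
     ...
     priority_type p = decl_init_priority_lookup (fndecl);

   Inserting DEFAULT_INIT_PRIORITY is a no-op, and looking up a decl
   with no entry also yields DEFAULT_INIT_PRIORITY.  */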
6440
6441 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6442
6443 static void
6444 print_debug_expr_statistics (void)
6445 {
6446 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6447 (long) htab_size (debug_expr_for_decl),
6448 (long) htab_elements (debug_expr_for_decl),
6449 htab_collisions (debug_expr_for_decl));
6450 }
6451
6452 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6453
6454 static void
6455 print_value_expr_statistics (void)
6456 {
6457 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6458 (long) htab_size (value_expr_for_decl),
6459 (long) htab_elements (value_expr_for_decl),
6460 htab_collisions (value_expr_for_decl));
6461 }
6462
6463 /* Lookup a debug expression for FROM, and return it if we find one. */
6464
6465 tree
6466 decl_debug_expr_lookup (tree from)
6467 {
6468 struct tree_decl_map *h, in;
6469 in.base.from = from;
6470
6471 h = (struct tree_decl_map *)
6472 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6473 if (h)
6474 return h->to;
6475 return NULL_TREE;
6476 }
6477
6478 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6479
6480 void
6481 decl_debug_expr_insert (tree from, tree to)
6482 {
6483 struct tree_decl_map *h;
6484 void **loc;
6485
6486 h = ggc_alloc_tree_decl_map ();
6487 h->base.from = from;
6488 h->to = to;
6489 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6490 INSERT);
6491 *(struct tree_decl_map **) loc = h;
6492 }
6493
6494 /* Lookup a value expression for FROM, and return it if we find one. */
6495
6496 tree
6497 decl_value_expr_lookup (tree from)
6498 {
6499 struct tree_decl_map *h, in;
6500 in.base.from = from;
6501
6502 h = (struct tree_decl_map *)
6503 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6504 if (h)
6505 return h->to;
6506 return NULL_TREE;
6507 }
6508
6509 /* Insert a mapping FROM->TO in the value expression hashtable. */
6510
6511 void
6512 decl_value_expr_insert (tree from, tree to)
6513 {
6514 struct tree_decl_map *h;
6515 void **loc;
6516
6517 h = ggc_alloc_tree_decl_map ();
6518 h->base.from = from;
6519 h->to = to;
6520 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6521 INSERT);
6522 *(struct tree_decl_map **) loc = h;
6523 }
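
/* A usage sketch, illustrative only: a round trip through the value
   expression table for some VAR_DECL VAR and replacement tree EXPR.

     decl_value_expr_insert (var, expr);
     ...
     tree e = decl_value_expr_lookup (var);   now E == EXPR

   Callers normally go through the DECL_VALUE_EXPR and
   SET_DECL_VALUE_EXPR macros, which also consult
   DECL_HAS_VALUE_EXPR_P.  */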
6524
6525 /* Lookup a vector of debug arguments for FROM, and return it if we
6526 find one. */
6527
6528 vec<tree, va_gc> **
6529 decl_debug_args_lookup (tree from)
6530 {
6531 struct tree_vec_map *h, in;
6532
6533 if (!DECL_HAS_DEBUG_ARGS_P (from))
6534 return NULL;
6535 gcc_checking_assert (debug_args_for_decl != NULL);
6536 in.base.from = from;
6537 h = (struct tree_vec_map *)
6538 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6539 if (h)
6540 return &h->to;
6541 return NULL;
6542 }
6543
6544 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6545 arguments hashtable. */
6546
6547 vec<tree, va_gc> **
6548 decl_debug_args_insert (tree from)
6549 {
6550 struct tree_vec_map *h;
6551 void **loc;
6552
6553 if (DECL_HAS_DEBUG_ARGS_P (from))
6554 return decl_debug_args_lookup (from);
6555 if (debug_args_for_decl == NULL)
6556 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6557 tree_vec_map_eq, 0);
6558 h = ggc_alloc_tree_vec_map ();
6559 h->base.from = from;
6560 h->to = NULL;
6561 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6562 INSERT);
6563 *(struct tree_vec_map **) loc = h;
6564 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6565 return &h->to;
6566 }
6567
6568 /* Hashing of types so that we don't make duplicates.
6569 The entry point is `type_hash_canon'. */
6570
6571 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6572 with types in the TREE_VALUE slots), by adding the hash codes
6573 of the individual types. */
6574
6575 static unsigned int
6576 type_hash_list (const_tree list, hashval_t hashcode)
6577 {
6578 const_tree tail;
6579
6580 for (tail = list; tail; tail = TREE_CHAIN (tail))
6581 if (TREE_VALUE (tail) != error_mark_node)
6582 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6583 hashcode);
6584
6585 return hashcode;
6586 }
6587
6588 /* These are the Hashtable callback functions. */
6589
6590 /* Returns true iff the types are equivalent. */
6591
6592 static int
6593 type_hash_eq (const void *va, const void *vb)
6594 {
6595 const struct type_hash *const a = (const struct type_hash *) va,
6596 *const b = (const struct type_hash *) vb;
6597
6598 /* First test the things that are the same for all types. */
6599 if (a->hash != b->hash
6600 || TREE_CODE (a->type) != TREE_CODE (b->type)
6601 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6602 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6603 TYPE_ATTRIBUTES (b->type))
6604 || (TREE_CODE (a->type) != COMPLEX_TYPE
6605 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6606 return 0;
6607
6608 /* Be careful about comparing arrays before and after the element type
6609 has been completed; don't compare TYPE_ALIGN unless both types are
6610 complete. */
6611 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6612 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6613 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6614 return 0;
6615
6616 switch (TREE_CODE (a->type))
6617 {
6618 case VOID_TYPE:
6619 case COMPLEX_TYPE:
6620 case POINTER_TYPE:
6621 case REFERENCE_TYPE:
6622 case NULLPTR_TYPE:
6623 return 1;
6624
6625 case VECTOR_TYPE:
6626 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6627
6628 case ENUMERAL_TYPE:
6629 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6630 && !(TYPE_VALUES (a->type)
6631 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6632 && TYPE_VALUES (b->type)
6633 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6634 && type_list_equal (TYPE_VALUES (a->type),
6635 TYPE_VALUES (b->type))))
6636 return 0;
6637
6638 /* ... fall through ... */
6639
6640 case INTEGER_TYPE:
6641 case REAL_TYPE:
6642 case BOOLEAN_TYPE:
6643 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6644 return false;
6645 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6646 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6647 TYPE_MAX_VALUE (b->type)))
6648 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6649 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6650 TYPE_MIN_VALUE (b->type))));
6651
6652 case FIXED_POINT_TYPE:
6653 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6654
6655 case OFFSET_TYPE:
6656 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6657
6658 case METHOD_TYPE:
6659 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6660 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6661 || (TYPE_ARG_TYPES (a->type)
6662 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6663 && TYPE_ARG_TYPES (b->type)
6664 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6665 && type_list_equal (TYPE_ARG_TYPES (a->type),
6666 TYPE_ARG_TYPES (b->type)))))
6667 break;
6668 return 0;
6669 case ARRAY_TYPE:
6670 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6671
6672 case RECORD_TYPE:
6673 case UNION_TYPE:
6674 case QUAL_UNION_TYPE:
6675 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6676 || (TYPE_FIELDS (a->type)
6677 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6678 && TYPE_FIELDS (b->type)
6679 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6680 && type_list_equal (TYPE_FIELDS (a->type),
6681 TYPE_FIELDS (b->type))));
6682
6683 case FUNCTION_TYPE:
6684 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6685 || (TYPE_ARG_TYPES (a->type)
6686 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6687 && TYPE_ARG_TYPES (b->type)
6688 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6689 && type_list_equal (TYPE_ARG_TYPES (a->type),
6690 TYPE_ARG_TYPES (b->type))))
6691 break;
6692 return 0;
6693
6694 default:
6695 return 0;
6696 }
6697
6698 if (lang_hooks.types.type_hash_eq != NULL)
6699 return lang_hooks.types.type_hash_eq (a->type, b->type);
6700
6701 return 1;
6702 }
6703
6704 /* Return the cached hash value. */
6705
6706 static hashval_t
6707 type_hash_hash (const void *item)
6708 {
6709 return ((const struct type_hash *) item)->hash;
6710 }
6711
6712 /* Look in the type hash table for a type isomorphic to TYPE.
6713 If one is found, return it. Otherwise return 0. */
6714
6715 static tree
6716 type_hash_lookup (hashval_t hashcode, tree type)
6717 {
6718 struct type_hash *h, in;
6719
6720 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6721 must call that routine before comparing TYPE_ALIGNs. */
6722 layout_type (type);
6723
6724 in.hash = hashcode;
6725 in.type = type;
6726
6727 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6728 hashcode);
6729 if (h)
6730 return h->type;
6731 return NULL_TREE;
6732 }
6733
6734 /* Add an entry to the type-hash-table
6735 for a type TYPE whose hash code is HASHCODE. */
6736
6737 static void
6738 type_hash_add (hashval_t hashcode, tree type)
6739 {
6740 struct type_hash *h;
6741 void **loc;
6742
6743 h = ggc_alloc_type_hash ();
6744 h->hash = hashcode;
6745 h->type = type;
6746 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6747 *loc = (void *)h;
6748 }
6749
6750 /* Given TYPE, and HASHCODE its hash code, return the canonical
6751 object for an identical type if one already exists.
6752 Otherwise, return TYPE, and record it as the canonical object.
6753
6754 To use this function, first create a type of the sort you want.
6755 Then compute its hash code from the fields of the type that
6756 make it different from other similar types.
6757 Then call this function and use the value. */
6758
6759 tree
6760 type_hash_canon (unsigned int hashcode, tree type)
6761 {
6762 tree t1;
6763
6764 /* The hash table only contains main variants, so ensure that's what we're
6765 being passed. */
6766 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6767
6768 /* See if the type is in the hash table already. If so, return it.
6769 Otherwise, add the type. */
6770 t1 = type_hash_lookup (hashcode, type);
6771 if (t1 != 0)
6772 {
6773 if (GATHER_STATISTICS)
6774 {
6775 tree_code_counts[(int) TREE_CODE (type)]--;
6776 tree_node_counts[(int) t_kind]--;
6777 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6778 }
6779 return t1;
6780 }
6781 else
6782 {
6783 type_hash_add (hashcode, type);
6784 return type;
6785 }
6786 }
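
/* An illustrative sketch of the intended calling pattern (not from the
   original source): build a candidate node, hash the fields that
   distinguish it, then canonicalize.

     tree t = make_node (INTEGER_TYPE);
     ... fill in the fields that make T different ...
     hashval_t hash = 0;
     hash = iterative_hash_expr (TYPE_MIN_VALUE (t), hash);
     hash = iterative_hash_expr (TYPE_MAX_VALUE (t), hash);
     t = type_hash_canon (hash, t);

   build_range_type_1 below follows this pattern.  */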
6787
6788 /* See if the data pointed to by the type hash table is marked. We consider
6789 it marked if the type is marked or if a debug type number or symbol
6790 table entry has been made for the type. */
6791
6792 static int
6793 type_hash_marked_p (const void *p)
6794 {
6795 const_tree const type = ((const struct type_hash *) p)->type;
6796
6797 return ggc_marked_p (type);
6798 }
6799
6800 static void
6801 print_type_hash_statistics (void)
6802 {
6803 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6804 (long) htab_size (type_hash_table),
6805 (long) htab_elements (type_hash_table),
6806 htab_collisions (type_hash_table));
6807 }
6808
6809 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6810 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6811 by adding the hash codes of the individual attributes. */
6812
6813 static unsigned int
6814 attribute_hash_list (const_tree list, hashval_t hashcode)
6815 {
6816 const_tree tail;
6817
6818 for (tail = list; tail; tail = TREE_CHAIN (tail))
6819 /* ??? Do we want to add in TREE_VALUE too? */
6820 hashcode = iterative_hash_object
6821 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6822 return hashcode;
6823 }
6824
6825 /* Given two lists of attributes, return true if list l2 is
6826 equivalent to l1. */
6827
6828 int
6829 attribute_list_equal (const_tree l1, const_tree l2)
6830 {
6831 if (l1 == l2)
6832 return 1;
6833
6834 return attribute_list_contained (l1, l2)
6835 && attribute_list_contained (l2, l1);
6836 }
6837
6838 /* Given two lists of attributes, return true if list L2 is
6839 completely contained within L1. */
6840 /* ??? This would be faster if attribute names were stored in a canonicalized
6841 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6842 must be used to show these elements are equivalent (which they are). */
6843 /* ??? It's not clear that attributes with arguments will always be handled
6844 correctly. */
6845
6846 int
6847 attribute_list_contained (const_tree l1, const_tree l2)
6848 {
6849 const_tree t1, t2;
6850
6851 /* First check the obvious, maybe the lists are identical. */
6852 if (l1 == l2)
6853 return 1;
6854
6855 /* Maybe the lists are similar. */
6856 for (t1 = l1, t2 = l2;
6857 t1 != 0 && t2 != 0
6858 && get_attribute_name (t1) == get_attribute_name (t2)
6859 && TREE_VALUE (t1) == TREE_VALUE (t2);
6860 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6861 ;
6862
6863 /* Maybe the lists are equal. */
6864 if (t1 == 0 && t2 == 0)
6865 return 1;
6866
6867 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6868 {
6869 const_tree attr;
6870 /* This CONST_CAST is okay because lookup_attribute does not
6871 modify its argument and the return value is assigned to a
6872 const_tree. */
6873 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6874 CONST_CAST_TREE (l1));
6875 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6876 attr = lookup_ident_attribute (get_attribute_name (t2),
6877 TREE_CHAIN (attr)))
6878 ;
6879
6880 if (attr == NULL_TREE)
6881 return 0;
6882 }
6883
6884 return 1;
6885 }
6886
6887 /* Given two lists of types
6888 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6889 return 1 if the lists contain the same types in the same order.
6890 Also, the TREE_PURPOSEs must match. */
6891
6892 int
6893 type_list_equal (const_tree l1, const_tree l2)
6894 {
6895 const_tree t1, t2;
6896
6897 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6898 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6899 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6900 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6901 && (TREE_TYPE (TREE_PURPOSE (t1))
6902 == TREE_TYPE (TREE_PURPOSE (t2))))))
6903 return 0;
6904
6905 return t1 == t2;
6906 }
6907
6908 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6909 given by TYPE. If the argument list accepts variable arguments,
6910 then this function counts only the ordinary arguments. */
6911
6912 int
6913 type_num_arguments (const_tree type)
6914 {
6915 int i = 0;
6916 tree t;
6917
6918 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6919 /* If the function does not take a variable number of arguments,
6920 the last element in the list will have type `void'. */
6921 if (VOID_TYPE_P (TREE_VALUE (t)))
6922 break;
6923 else
6924 ++i;
6925
6926 return i;
6927 }
6928
6929 /* Nonzero if integer constants T1 and T2
6930 represent the same constant value. */
6931
6932 int
6933 tree_int_cst_equal (const_tree t1, const_tree t2)
6934 {
6935 if (t1 == t2)
6936 return 1;
6937
6938 if (t1 == 0 || t2 == 0)
6939 return 0;
6940
6941 if (TREE_CODE (t1) == INTEGER_CST
6942 && TREE_CODE (t2) == INTEGER_CST
6943 && wi::to_widest (t1) == wi::to_widest (t2))
6944 return 1;
6945
6946 return 0;
6947 }
6948
6949 /* Nonzero if integer constants T1 and T2 represent values that satisfy <.
6950 The precise way of comparison depends on their data type. */
6951
6952 int
6953 tree_int_cst_lt (const_tree t1, const_tree t2)
6954 {
6955 return INT_CST_LT (t1, t2);
6956 }
6957
6958 /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
6959
6960 int
6961 tree_int_cst_compare (const_tree t1, const_tree t2)
6962 {
6963 return wi::cmps (wi::to_widest (t1), wi::to_widest (t2));
6964 }
6965
6966 /* Return true if T is an INTEGER_CST whose numerical value (extended
6967 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6968
6969 bool
6970 tree_fits_shwi_p (const_tree t)
6971 {
6972 return (t != NULL_TREE
6973 && TREE_CODE (t) == INTEGER_CST
6974 && wi::fits_shwi_p (wi::to_widest (t)));
6975 }
6976
6977 /* Return true if T is an INTEGER_CST whose numerical value (extended
6978 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6979
6980 bool
6981 tree_fits_uhwi_p (const_tree t)
6982 {
6983 return (t != NULL_TREE
6984 && TREE_CODE (t) == INTEGER_CST
6985 && wi::fits_uhwi_p (wi::to_widest (t)));
6986 }
6987
6988 /* T is an INTEGER_CST whose numerical value (extended according to
6989 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6990 HOST_WIDE_INT. */
6991
6992 HOST_WIDE_INT
6993 tree_to_shwi (const_tree t)
6994 {
6995 gcc_assert (tree_fits_shwi_p (t));
6996 return TREE_INT_CST_ELT (t, 0);
6997 }
6998
6999 /* T is an INTEGER_CST whose numerical value (extended according to
7000 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7001 HOST_WIDE_INT. */
7002
7003 unsigned HOST_WIDE_INT
7004 tree_to_uhwi (const_tree t)
7005 {
7006 gcc_assert (tree_fits_uhwi_p (t));
7007 return TREE_INT_CST_ELT (t, 0);
7008 }
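
/* A usage sketch, illustrative only: the usual check-then-extract idiom
   for these accessors, here applied to some tree LEN.

     if (tree_fits_uhwi_p (len))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
         ... use N ...
       }

   Calling tree_to_uhwi without the check trips the assertion above.  */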
7009
7010 /* Return the most significant (sign) bit of T. */
7011
7012 int
7013 tree_int_cst_sign_bit (const_tree t)
7014 {
7015 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7016
7017 return wi::extract_uhwi (t, bitno, 1);
7018 }
7019
7020 /* Return an indication of the sign of the integer constant T.
7021 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7022 Note that -1 will never be returned if T's type is unsigned. */
7023
7024 int
7025 tree_int_cst_sgn (const_tree t)
7026 {
7027 if (wi::eq_p (t, 0))
7028 return 0;
7029 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7030 return 1;
7031 else if (wi::neg_p (t))
7032 return -1;
7033 else
7034 return 1;
7035 }
7036
7037 /* Return the minimum number of bits needed to represent VALUE in a
7038 signed or unsigned type; SGN says which. */
7039
7040 unsigned int
7041 tree_int_cst_min_precision (tree value, signop sgn)
7042 {
7043 /* If the value is negative, compute its negative minus 1. The latter
7044 adjustment is because the absolute value of the largest negative value
7045 is one larger than the largest positive value. This is equivalent to
7046 a bit-wise negation, so use that operation instead. */
7047
7048 if (tree_int_cst_sgn (value) < 0)
7049 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7050
7051 /* Return the number of bits needed, taking into account the fact
7052 that we need one more bit for a signed type than for an unsigned one.
7053 If VALUE is 0 or -1, the minimum precision is 1 regardless of
7054 whether SGN is SIGNED or UNSIGNED. */
7055
7056 if (integer_zerop (value))
7057 return 1;
7058 else
7059 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7060 }
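
/* Worked examples for the function above, illustrative only: for
   VALUE == 5, tree_floor_log2 returns 2, so 3 bits suffice when SGN is
   UNSIGNED and 4 bits when SGN is SIGNED; for VALUE == -3, the bit-wise
   negation yields 2, giving 2 + 1 = 3 signed bits, which indeed covers
   the range [-4, 3].  */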
7061
7062 /* Return truthvalue of whether T1 is the same tree structure as T2.
7063 Return 1 if they are the same.
7064 Return 0 if they are understandably different.
7065 Return -1 if either contains tree structure not understood by
7066 this function. */
7067
7068 int
7069 simple_cst_equal (const_tree t1, const_tree t2)
7070 {
7071 enum tree_code code1, code2;
7072 int cmp;
7073 int i;
7074
7075 if (t1 == t2)
7076 return 1;
7077 if (t1 == 0 || t2 == 0)
7078 return 0;
7079
7080 code1 = TREE_CODE (t1);
7081 code2 = TREE_CODE (t2);
7082
7083 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7084 {
7085 if (CONVERT_EXPR_CODE_P (code2)
7086 || code2 == NON_LVALUE_EXPR)
7087 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7088 else
7089 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7090 }
7091
7092 else if (CONVERT_EXPR_CODE_P (code2)
7093 || code2 == NON_LVALUE_EXPR)
7094 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7095
7096 if (code1 != code2)
7097 return 0;
7098
7099 switch (code1)
7100 {
7101 case INTEGER_CST:
7102 return wi::to_widest (t1) == wi::to_widest (t2);
7103
7104 case REAL_CST:
7105 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7106
7107 case FIXED_CST:
7108 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7109
7110 case STRING_CST:
7111 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7112 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7113 TREE_STRING_LENGTH (t1)));
7114
7115 case CONSTRUCTOR:
7116 {
7117 unsigned HOST_WIDE_INT idx;
7118 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7119 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7120
7121 if (vec_safe_length (v1) != vec_safe_length (v2))
7122 return false;
7123
7124 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7125 /* ??? Should we also handle fields here? */
7126 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7127 return false;
7128 return true;
7129 }
7130
7131 case SAVE_EXPR:
7132 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7133
7134 case CALL_EXPR:
7135 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7136 if (cmp <= 0)
7137 return cmp;
7138 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7139 return 0;
7140 {
7141 const_tree arg1, arg2;
7142 const_call_expr_arg_iterator iter1, iter2;
7143 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7144 arg2 = first_const_call_expr_arg (t2, &iter2);
7145 arg1 && arg2;
7146 arg1 = next_const_call_expr_arg (&iter1),
7147 arg2 = next_const_call_expr_arg (&iter2))
7148 {
7149 cmp = simple_cst_equal (arg1, arg2);
7150 if (cmp <= 0)
7151 return cmp;
7152 }
7153 return arg1 == arg2;
7154 }
7155
7156 case TARGET_EXPR:
7157 /* Special case: if either target is an unallocated VAR_DECL,
7158 it means that it's going to be unified with whatever the
7159 TARGET_EXPR is really supposed to initialize, so treat it
7160 as being equivalent to anything. */
7161 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7162 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7163 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7164 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7165 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7166 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7167 cmp = 1;
7168 else
7169 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7170
7171 if (cmp <= 0)
7172 return cmp;
7173
7174 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7175
7176 case WITH_CLEANUP_EXPR:
7177 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7178 if (cmp <= 0)
7179 return cmp;
7180
7181 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7182
7183 case COMPONENT_REF:
7184 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7185 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7186
7187 return 0;
7188
7189 case VAR_DECL:
7190 case PARM_DECL:
7191 case CONST_DECL:
7192 case FUNCTION_DECL:
7193 return 0;
7194
7195 default:
7196 break;
7197 }
7198
7199 /* This general rule works for most tree codes. All exceptions should be
7200 handled above. If this is a language-specific tree code, we can't
7201 trust what might be in the operand, so say we don't know
7202 the situation. */
7203 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7204 return -1;
7205
7206 switch (TREE_CODE_CLASS (code1))
7207 {
7208 case tcc_unary:
7209 case tcc_binary:
7210 case tcc_comparison:
7211 case tcc_expression:
7212 case tcc_reference:
7213 case tcc_statement:
7214 cmp = 1;
7215 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7216 {
7217 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7218 if (cmp <= 0)
7219 return cmp;
7220 }
7221
7222 return cmp;
7223
7224 default:
7225 return -1;
7226 }
7227 }
7228
7229 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7230 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7231 than U, respectively. */
7232
7233 int
7234 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7235 {
7236 if (tree_int_cst_sgn (t) < 0)
7237 return -1;
7238 else if (!cst_fits_uhwi_p (t))
7239 return 1;
7240 else if (TREE_INT_CST_LOW (t) == u)
7241 return 0;
7242 else if (TREE_INT_CST_LOW (t) < u)
7243 return -1;
7244 else
7245 return 1;
7246 }
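
/* A usage sketch, illustrative only (TYPE and SMALL_P are hypothetical
   locals): comparing a type size against a plain integer without first
   building an INTEGER_CST.

     bool small_p = compare_tree_int (TYPE_SIZE_UNIT (type), 16) <= 0;

   SMALL_P is true when TYPE occupies at most 16 bytes.  */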
7247
7248 /* Return true if SIZE represents a constant size that is in bounds of
7249 what the middle-end and the backend accept (covering not more than
7250 half of the address-space). */
7251
7252 bool
7253 valid_constant_size_p (const_tree size)
7254 {
7255 if (! tree_fits_uhwi_p (size)
7256 || TREE_OVERFLOW (size)
7257 || tree_int_cst_sign_bit (size) != 0)
7258 return false;
7259 return true;
7260 }
7261
7262 /* Return the precision of the type, or for a complex or vector type the
7263 precision of the type of its elements. */
7264
7265 unsigned int
7266 element_precision (const_tree type)
7267 {
7268 enum tree_code code = TREE_CODE (type);
7269 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7270 type = TREE_TYPE (type);
7271
7272 return TYPE_PRECISION (type);
7273 }
7274
7275 /* Return true if CODE represents an associative tree code. Otherwise
7276 return false. */
7277 bool
7278 associative_tree_code (enum tree_code code)
7279 {
7280 switch (code)
7281 {
7282 case BIT_IOR_EXPR:
7283 case BIT_AND_EXPR:
7284 case BIT_XOR_EXPR:
7285 case PLUS_EXPR:
7286 case MULT_EXPR:
7287 case MIN_EXPR:
7288 case MAX_EXPR:
7289 return true;
7290
7291 default:
7292 break;
7293 }
7294 return false;
7295 }
7296
7297 /* Return true if CODE represents a commutative tree code. Otherwise
7298 return false. */
7299 bool
7300 commutative_tree_code (enum tree_code code)
7301 {
7302 switch (code)
7303 {
7304 case PLUS_EXPR:
7305 case MULT_EXPR:
7306 case MULT_HIGHPART_EXPR:
7307 case MIN_EXPR:
7308 case MAX_EXPR:
7309 case BIT_IOR_EXPR:
7310 case BIT_XOR_EXPR:
7311 case BIT_AND_EXPR:
7312 case NE_EXPR:
7313 case EQ_EXPR:
7314 case UNORDERED_EXPR:
7315 case ORDERED_EXPR:
7316 case UNEQ_EXPR:
7317 case LTGT_EXPR:
7318 case TRUTH_AND_EXPR:
7319 case TRUTH_XOR_EXPR:
7320 case TRUTH_OR_EXPR:
7321 case WIDEN_MULT_EXPR:
7322 case VEC_WIDEN_MULT_HI_EXPR:
7323 case VEC_WIDEN_MULT_LO_EXPR:
7324 case VEC_WIDEN_MULT_EVEN_EXPR:
7325 case VEC_WIDEN_MULT_ODD_EXPR:
7326 return true;
7327
7328 default:
7329 break;
7330 }
7331 return false;
7332 }
7333
7334 /* Return true if CODE represents a ternary tree code for which the
7335 first two operands are commutative. Otherwise return false. */
7336 bool
7337 commutative_ternary_tree_code (enum tree_code code)
7338 {
7339 switch (code)
7340 {
7341 case WIDEN_MULT_PLUS_EXPR:
7342 case WIDEN_MULT_MINUS_EXPR:
7343 return true;
7344
7345 default:
7346 break;
7347 }
7348 return false;
7349 }
7350
7351 /* Generate a hash value for an expression. This can be used iteratively
7352 by passing a previous result as the VAL argument.
7353
7354 This function is intended to produce the same hash for expressions which
7355 would compare equal using operand_equal_p. */
7356
7357 hashval_t
7358 iterative_hash_expr (const_tree t, hashval_t val)
7359 {
7360 int i;
7361 enum tree_code code;
7362 char tclass;
7363
7364 if (t == NULL_TREE)
7365 return iterative_hash_hashval_t (0, val);
7366
7367 code = TREE_CODE (t);
7368
7369 switch (code)
7370 {
7371 /* Alas, constants aren't shared, so we can't rely on pointer
7372 identity. */
7373 case INTEGER_CST:
7374 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7375 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7376 return val;
7377 case REAL_CST:
7378 {
7379 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7380
7381 return iterative_hash_hashval_t (val2, val);
7382 }
7383 case FIXED_CST:
7384 {
7385 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7386
7387 return iterative_hash_hashval_t (val2, val);
7388 }
7389 case STRING_CST:
7390 return iterative_hash (TREE_STRING_POINTER (t),
7391 TREE_STRING_LENGTH (t), val);
7392 case COMPLEX_CST:
7393 val = iterative_hash_expr (TREE_REALPART (t), val);
7394 return iterative_hash_expr (TREE_IMAGPART (t), val);
7395 case VECTOR_CST:
7396 {
7397 unsigned i;
7398 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7399 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7400 return val;
7401 }
7402 case SSA_NAME:
7403 /* We can just hash the SSA name version, which is unique. */
7404 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7405 case PLACEHOLDER_EXPR:
7406 /* The node itself doesn't matter. */
7407 return val;
7408 case TREE_LIST:
7409 /* A list of expressions, for a CALL_EXPR or as the elements of a
7410 VECTOR_CST. */
7411 for (; t; t = TREE_CHAIN (t))
7412 val = iterative_hash_expr (TREE_VALUE (t), val);
7413 return val;
7414 case CONSTRUCTOR:
7415 {
7416 unsigned HOST_WIDE_INT idx;
7417 tree field, value;
7418 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7419 {
7420 val = iterative_hash_expr (field, val);
7421 val = iterative_hash_expr (value, val);
7422 }
7423 return val;
7424 }
7425 case FUNCTION_DECL:
7426 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7427 Otherwise nodes that compare equal according to operand_equal_p might
7428 get different hash codes. However, don't do this for machine specific
7429 or front end builtins, since the function code is overloaded in those
7430 cases. */
7431 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7432 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7433 {
7434 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7435 code = TREE_CODE (t);
7436 }
7437 /* FALL THROUGH */
7438 default:
7439 tclass = TREE_CODE_CLASS (code);
7440
7441 if (tclass == tcc_declaration)
7442 {
7443 /* DECLs have a unique ID. */
7444 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7445 }
7446 else
7447 {
7448 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7449
7450 val = iterative_hash_object (code, val);
7451
7452 /* Don't hash the type, that can lead to having nodes which
7453 compare equal according to operand_equal_p, but which
7454 have different hash codes. */
7455 if (CONVERT_EXPR_CODE_P (code)
7456 || code == NON_LVALUE_EXPR)
7457 {
7458 /* Make sure to include signedness in the hash computation. */
7459 val += TYPE_UNSIGNED (TREE_TYPE (t));
7460 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7461 }
7462
7463 else if (commutative_tree_code (code))
7464 {
7465 /* It's a commutative expression. We want to hash it the same
7466 however it appears. We do this by first hashing both operands
7467 and then rehashing based on the order of their independent
7468 hashes. */
7469 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7470 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7471 hashval_t t;
7472
7473 if (one > two)
7474 t = one, one = two, two = t;
7475
7476 val = iterative_hash_hashval_t (one, val);
7477 val = iterative_hash_hashval_t (two, val);
7478 }
7479 else
7480 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7481 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7482 }
7483 return val;
7484 }
7485 }
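
/* A usage sketch, illustrative only (BASE and OFFSET are arbitrary
   trees): chaining calls to fold several expressions into one hash
   value for use as a hash-table key.

     hashval_t h = 0;
     h = iterative_hash_expr (base, h);
     h = iterative_hash_expr (offset, h);

   Trees that compare equal under operand_equal_p are intended to end
   up with the same value of H.  */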
7486
7487 /* Constructors for pointer, array and function types.
7488 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7489 constructed by language-dependent code, not here.) */
7490
7491 /* Construct, lay out and return the type of pointers to TO_TYPE with
7492 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7493 reference all of memory. If such a type has already been
7494 constructed, reuse it. */
7495
7496 tree
7497 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7498 bool can_alias_all)
7499 {
7500 tree t;
7501
7502 if (to_type == error_mark_node)
7503 return error_mark_node;
7504
7505 /* If the pointed-to type has the may_alias attribute set, force
7506 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7507 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7508 can_alias_all = true;
7509
7510 /* In some cases, languages will have things that aren't a POINTER_TYPE
7511 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7512 In that case, return that type without regard to the rest of our
7513 operands.
7514
7515 ??? This is a kludge, but consistent with the way this function has
7516 always operated and there doesn't seem to be a good way to avoid this
7517 at the moment. */
7518 if (TYPE_POINTER_TO (to_type) != 0
7519 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7520 return TYPE_POINTER_TO (to_type);
7521
7522 /* First, if we already have a type for pointers to TO_TYPE and it's
7523 the proper mode, use it. */
7524 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7525 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7526 return t;
7527
7528 t = make_node (POINTER_TYPE);
7529
7530 TREE_TYPE (t) = to_type;
7531 SET_TYPE_MODE (t, mode);
7532 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7533 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7534 TYPE_POINTER_TO (to_type) = t;
7535
7536 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7537 SET_TYPE_STRUCTURAL_EQUALITY (t);
7538 else if (TYPE_CANONICAL (to_type) != to_type)
7539 TYPE_CANONICAL (t)
7540 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7541 mode, can_alias_all);
7542
7543 /* Lay out the type. This function has many callers that are concerned
7544 with expression-construction, and this simplifies them all. */
7545 layout_type (t);
7546
7547 return t;
7548 }
7549
7550 /* By default build pointers in ptr_mode. */
7551
7552 tree
7553 build_pointer_type (tree to_type)
7554 {
7555 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7556 : TYPE_ADDR_SPACE (to_type);
7557 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7558 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7559 }
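
/* A usage sketch, illustrative only:

     tree pchar  = build_pointer_type (char_type_node);
     tree ppchar = build_pointer_type (pchar);

   Repeated calls with the same TO_TYPE return the node cached on
   TYPE_POINTER_TO, so pointer types are shared.  */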
7560
7561 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7562
7563 tree
7564 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7565 bool can_alias_all)
7566 {
7567 tree t;
7568
7569 if (to_type == error_mark_node)
7570 return error_mark_node;
7571
7572 /* If the pointed-to type has the may_alias attribute set, force
7573 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7574 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7575 can_alias_all = true;
7576
7577 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7578 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7579 In that case, return that type without regard to the rest of our
7580 operands.
7581
7582 ??? This is a kludge, but consistent with the way this function has
7583 always operated and there doesn't seem to be a good way to avoid this
7584 at the moment. */
7585 if (TYPE_REFERENCE_TO (to_type) != 0
7586 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7587 return TYPE_REFERENCE_TO (to_type);
7588
7589 /* First, if we already have a type for pointers to TO_TYPE and it's
7590 the proper mode, use it. */
7591 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7592 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7593 return t;
7594
7595 t = make_node (REFERENCE_TYPE);
7596
7597 TREE_TYPE (t) = to_type;
7598 SET_TYPE_MODE (t, mode);
7599 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7600 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7601 TYPE_REFERENCE_TO (to_type) = t;
7602
7603 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7604 SET_TYPE_STRUCTURAL_EQUALITY (t);
7605 else if (TYPE_CANONICAL (to_type) != to_type)
7606 TYPE_CANONICAL (t)
7607 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7608 mode, can_alias_all);
7609
7610 layout_type (t);
7611
7612 return t;
7613 }
7614
7615
7616 /* Build the node for the type of references-to-TO_TYPE by default
7617 in ptr_mode. */
7618
7619 tree
7620 build_reference_type (tree to_type)
7621 {
7622 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7623 : TYPE_ADDR_SPACE (to_type);
7624 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7625 return build_reference_type_for_mode (to_type, pointer_mode, false);
7626 }
7627
7628 #define MAX_INT_CACHED_PREC \
7629 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7630 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7631
7632 /* Builds a signed or unsigned integer type of precision PRECISION.
7633 Used for C bitfields whose precision does not match that of
7634 built-in target types. */
7635 tree
7636 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7637 int unsignedp)
7638 {
7639 tree itype, ret;
7640
7641 if (unsignedp)
7642 unsignedp = MAX_INT_CACHED_PREC + 1;
7643
7644 if (precision <= MAX_INT_CACHED_PREC)
7645 {
7646 itype = nonstandard_integer_type_cache[precision + unsignedp];
7647 if (itype)
7648 return itype;
7649 }
7650
7651 itype = make_node (INTEGER_TYPE);
7652 TYPE_PRECISION (itype) = precision;
7653
7654 if (unsignedp)
7655 fixup_unsigned_type (itype);
7656 else
7657 fixup_signed_type (itype);
7658
7659 ret = itype;
7660 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7661 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7662 if (precision <= MAX_INT_CACHED_PREC)
7663 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7664
7665 return ret;
7666 }
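
/* A usage sketch, illustrative only: a 24-bit unsigned type, as needed
   e.g. for a bit-field of that width.

     tree u24 = build_nonstandard_integer_type (24, 1);

   Since 24 <= MAX_INT_CACHED_PREC, a second call returns the cached
   node.  */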
7667
7668 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7669 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7670 is true, reuse such a type that has already been constructed. */
7671
7672 static tree
7673 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7674 {
7675 tree itype = make_node (INTEGER_TYPE);
7676 hashval_t hashcode = 0;
7677
7678 TREE_TYPE (itype) = type;
7679
7680 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7681 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7682
7683 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7684 SET_TYPE_MODE (itype, TYPE_MODE (type));
7685 TYPE_SIZE (itype) = TYPE_SIZE (type);
7686 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7687 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7688 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7689
7690 if (!shared)
7691 return itype;
7692
7693 if ((TYPE_MIN_VALUE (itype)
7694 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7695 || (TYPE_MAX_VALUE (itype)
7696 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7697 {
7698 /* Since we cannot reliably merge this type, we need to compare it using
7699 structural equality checks. */
7700 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7701 return itype;
7702 }
7703
7704 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7705 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7706 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7707 itype = type_hash_canon (hashcode, itype);
7708
7709 return itype;
7710 }
7711
7712 /* Wrapper around build_range_type_1 with SHARED set to true. */
7713
7714 tree
7715 build_range_type (tree type, tree lowval, tree highval)
7716 {
7717 return build_range_type_1 (type, lowval, highval, true);
7718 }
7719
7720 /* Wrapper around build_range_type_1 with SHARED set to false. */
7721
7722 tree
7723 build_nonshared_range_type (tree type, tree lowval, tree highval)
7724 {
7725 return build_range_type_1 (type, lowval, highval, false);
7726 }
7727
7728 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7729 MAXVAL should be the maximum value in the domain
7730 (one less than the length of the array).
7731
7732 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7733 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7734 The limit exists because the result is a signed type and we don't handle
7735 sizes that use more than one HOST_WIDE_INT. */
7736
7737 tree
7738 build_index_type (tree maxval)
7739 {
7740 return build_range_type (sizetype, size_zero_node, maxval);
7741 }
7742
7743 /* Return true if the debug information for TYPE, a subtype, should be emitted
7744 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7745 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7746 debug info and doesn't reflect the source code. */
7747
7748 bool
7749 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7750 {
7751 tree base_type = TREE_TYPE (type), low, high;
7752
7753 /* Subrange types have a base type which is an integral type. */
7754 if (!INTEGRAL_TYPE_P (base_type))
7755 return false;
7756
7757 /* Get the real bounds of the subtype. */
7758 if (lang_hooks.types.get_subrange_bounds)
7759 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7760 else
7761 {
7762 low = TYPE_MIN_VALUE (type);
7763 high = TYPE_MAX_VALUE (type);
7764 }
7765
7766 /* If the type and its base type have the same representation and the same
7767 name, then the type is not a subrange but a copy of the base type. */
7768 if ((TREE_CODE (base_type) == INTEGER_TYPE
7769 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7770 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7771 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7772 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
7773 {
7774 tree type_name = TYPE_NAME (type);
7775 tree base_type_name = TYPE_NAME (base_type);
7776
7777 if (type_name && TREE_CODE (type_name) == TYPE_DECL)
7778 type_name = DECL_NAME (type_name);
7779
7780 if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
7781 base_type_name = DECL_NAME (base_type_name);
7782
7783 if (type_name == base_type_name)
7784 return false;
7785 }
7786
7787 if (lowval)
7788 *lowval = low;
7789 if (highval)
7790 *highval = high;
7791 return true;
7792 }
7793
7794 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7795 and number of elements specified by the range of values of INDEX_TYPE.
7796 If SHARED is true, reuse such a type that has already been constructed. */
7797
7798 static tree
7799 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7800 {
7801 tree t;
7802
7803 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7804 {
7805 error ("arrays of functions are not meaningful");
7806 elt_type = integer_type_node;
7807 }
7808
7809 t = make_node (ARRAY_TYPE);
7810 TREE_TYPE (t) = elt_type;
7811 TYPE_DOMAIN (t) = index_type;
7812 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7813 layout_type (t);
7814
7815 /* If the element type is incomplete at this point we get marked for
7816 structural equality. Do not record these types in the canonical
7817 type hashtable. */
7818 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7819 return t;
7820
7821 if (shared)
7822 {
7823 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7824 if (index_type)
7825 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7826 t = type_hash_canon (hashcode, t);
7827 }
7828
7829 if (TYPE_CANONICAL (t) == t)
7830 {
7831 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7832 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7833 SET_TYPE_STRUCTURAL_EQUALITY (t);
7834 else if (TYPE_CANONICAL (elt_type) != elt_type
7835 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7836 TYPE_CANONICAL (t)
7837 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7838 index_type
7839 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7840 shared);
7841 }
7842
7843 return t;
7844 }
7845
7846 /* Wrapper around build_array_type_1 with SHARED set to true. */
7847
7848 tree
7849 build_array_type (tree elt_type, tree index_type)
7850 {
7851 return build_array_type_1 (elt_type, index_type, true);
7852 }
7853
7854 /* Wrapper around build_array_type_1 with SHARED set to false. */
7855
7856 tree
7857 build_nonshared_array_type (tree elt_type, tree index_type)
7858 {
7859 return build_array_type_1 (elt_type, index_type, false);
7860 }
7861
7862 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7863 sizetype. */
7864
7865 tree
7866 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7867 {
7868 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7869 }
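/* Illustrative sketch (hypothetical caller): the type `int[10]' can be
   built either explicitly,

     tree domain = build_index_type (size_int (9));
     tree a1 = build_array_type (integer_type_node, domain);

   or through the convenience wrapper

     tree a2 = build_array_type_nelts (integer_type_node, 10);

   Both paths go through build_array_type_1 with SHARED set, so the two
   results are normally the same hash-canonicalized node.  */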
7870
7871 /* Recursively examines the array elements of TYPE, until a non-array
7872 element type is found. */
7873
7874 tree
7875 strip_array_types (tree type)
7876 {
7877 while (TREE_CODE (type) == ARRAY_TYPE)
7878 type = TREE_TYPE (type);
7879
7880 return type;
7881 }
7882
7883 /* Computes the canonical argument types from the argument type list
7884 ARGTYPES.
7885
7886 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7887 on entry to this function, or if any of the ARGTYPES are
7888 structural.
7889
7890 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7891 true on entry to this function, or if any of the ARGTYPES are
7892 non-canonical.
7893
7894 Returns a canonical argument list, which may be ARGTYPES when the
7895 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7896 true) or would not differ from ARGTYPES. */
7897
7898 static tree
7899 maybe_canonicalize_argtypes (tree argtypes,
7900 bool *any_structural_p,
7901 bool *any_noncanonical_p)
7902 {
7903 tree arg;
7904 bool any_noncanonical_argtypes_p = false;
7905
7906 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7907 {
7908 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7909 /* Fail gracefully by stating that the type is structural. */
7910 *any_structural_p = true;
7911 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7912 *any_structural_p = true;
7913 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7914 || TREE_PURPOSE (arg))
7915 /* If the argument has a default argument, we consider it
7916 non-canonical even though the type itself is canonical.
7917 That way, different variants of function and method types
7918 with default arguments will all point to the variant with
7919 no defaults as their canonical type. */
7920 any_noncanonical_argtypes_p = true;
7921 }
7922
7923 if (*any_structural_p)
7924 return argtypes;
7925
7926 if (any_noncanonical_argtypes_p)
7927 {
7928 /* Build the canonical list of argument types. */
7929 tree canon_argtypes = NULL_TREE;
7930 bool is_void = false;
7931
7932 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7933 {
7934 if (arg == void_list_node)
7935 is_void = true;
7936 else
7937 canon_argtypes = tree_cons (NULL_TREE,
7938 TYPE_CANONICAL (TREE_VALUE (arg)),
7939 canon_argtypes);
7940 }
7941
7942 canon_argtypes = nreverse (canon_argtypes);
7943 if (is_void)
7944 canon_argtypes = chainon (canon_argtypes, void_list_node);
7945
7946 /* There is a non-canonical type. */
7947 *any_noncanonical_p = true;
7948 return canon_argtypes;
7949 }
7950
7951 /* The canonical argument types are the same as ARGTYPES. */
7952 return argtypes;
7953 }
7954
7955 /* Construct, lay out and return
7956 the type of functions returning type VALUE_TYPE
7957 given arguments of types ARG_TYPES.
7958 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7959 are data type nodes for the arguments of the function.
7960 If such a type has already been constructed, reuse it. */
7961
7962 tree
7963 build_function_type (tree value_type, tree arg_types)
7964 {
7965 tree t;
7966 hashval_t hashcode = 0;
7967 bool any_structural_p, any_noncanonical_p;
7968 tree canon_argtypes;
7969
7970 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7971 {
7972 error ("function return type cannot be function");
7973 value_type = integer_type_node;
7974 }
7975
7976 /* Make a node of the sort we want. */
7977 t = make_node (FUNCTION_TYPE);
7978 TREE_TYPE (t) = value_type;
7979 TYPE_ARG_TYPES (t) = arg_types;
7980
7981 /* If we already have such a type, use the old one. */
7982 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
7983 hashcode = type_hash_list (arg_types, hashcode);
7984 t = type_hash_canon (hashcode, t);
7985
7986 /* Set up the canonical type. */
7987 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7988 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7989 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7990 &any_structural_p,
7991 &any_noncanonical_p);
7992 if (any_structural_p)
7993 SET_TYPE_STRUCTURAL_EQUALITY (t);
7994 else if (any_noncanonical_p)
7995 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7996 canon_argtypes);
7997
7998 if (!COMPLETE_TYPE_P (t))
7999 layout_type (t);
8000 return t;
8001 }
8002
8003 /* Build a function type. The RETURN_TYPE is the type returned by the
8004 function. If VAARGS is set, no void_type_node is appended to
8005 the list. ARGP must always be terminated by a NULL_TREE. */
8006
8007 static tree
8008 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8009 {
8010 tree t, args, last;
8011
8012 t = va_arg (argp, tree);
8013 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8014 args = tree_cons (NULL_TREE, t, args);
8015
8016 if (vaargs)
8017 {
8018 last = args;
8019 if (args != NULL_TREE)
8020 args = nreverse (args);
8021 gcc_assert (last != void_list_node);
8022 }
8023 else if (args == NULL_TREE)
8024 args = void_list_node;
8025 else
8026 {
8027 last = args;
8028 args = nreverse (args);
8029 TREE_CHAIN (last) = void_list_node;
8030 }
8031 args = build_function_type (return_type, args);
8032
8033 return args;
8034 }
8035
8036 /* Build a function type. The RETURN_TYPE is the type returned by the
8037 function. If additional arguments are provided, they are
8038 additional argument types. The list of argument types must always
8039 be terminated by NULL_TREE. */
8040
8041 tree
8042 build_function_type_list (tree return_type, ...)
8043 {
8044 tree args;
8045 va_list p;
8046
8047 va_start (p, return_type);
8048 args = build_function_type_list_1 (false, return_type, p);
8049 va_end (p);
8050 return args;
8051 }
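/* Usage sketch (hypothetical caller): the type `int (int, double)'
   can be built with

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node,
                                             double_type_node,
                                             NULL_TREE);

   The trailing NULL_TREE is required; it is the only terminator the
   va_list walk in build_function_type_list_1 recognizes.  */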
8052
8053 /* Build a variable argument function type. The RETURN_TYPE is the
8054 type returned by the function. If additional arguments are provided,
8055 they are additional argument types. The list of argument types must
8056 always be terminated by NULL_TREE. */
8057
8058 tree
8059 build_varargs_function_type_list (tree return_type, ...)
8060 {
8061 tree args;
8062 va_list p;
8063
8064 va_start (p, return_type);
8065 args = build_function_type_list_1 (true, return_type, p);
8066 va_end (p);
8067
8068 return args;
8069 }
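/* Usage sketch (hypothetical caller, const qualification omitted for
   brevity): a printf-like type `int (char *, ...)' could be built as

     tree fntype
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   The absence of a trailing void_list_node in TYPE_ARG_TYPES is what
   marks the resulting type as taking a variable number of arguments.  */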
8070
8071 /* Build a function type. RETURN_TYPE is the type returned by the
8072 function; VAARGS indicates whether the function takes varargs. The
8073 function takes N named arguments, the types of which are provided in
8074 ARG_TYPES. */
8075
8076 static tree
8077 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8078 tree *arg_types)
8079 {
8080 int i;
8081 tree t = vaargs ? NULL_TREE : void_list_node;
8082
8083 for (i = n - 1; i >= 0; i--)
8084 t = tree_cons (NULL_TREE, arg_types[i], t);
8085
8086 return build_function_type (return_type, t);
8087 }
8088
8089 /* Build a function type. RETURN_TYPE is the type returned by the
8090 function. The function takes N named arguments, the types of which
8091 are provided in ARG_TYPES. */
8092
8093 tree
8094 build_function_type_array (tree return_type, int n, tree *arg_types)
8095 {
8096 return build_function_type_array_1 (false, return_type, n, arg_types);
8097 }
8098
8099 /* Build a variable argument function type. RETURN_TYPE is the type
8100 returned by the function. The function takes N named arguments, the
8101 types of which are provided in ARG_TYPES. */
8102
8103 tree
8104 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8105 {
8106 return build_function_type_array_1 (true, return_type, n, arg_types);
8107 }
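/* Usage sketch (hypothetical caller): the array-based variants are
   convenient when the argument types are already in a vector, e.g.

     tree argts[2] = { integer_type_node, double_type_node };
     tree fntype = build_function_type_array (integer_type_node, 2, argts);

   which builds the same `int (int, double)' type as the
   build_function_type_list example above.  */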
8108
8109 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8110 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8111 for the method. An implicit additional parameter (of type
8112 pointer-to-BASETYPE) is added to the ARGTYPES. */
8113
8114 tree
8115 build_method_type_directly (tree basetype,
8116 tree rettype,
8117 tree argtypes)
8118 {
8119 tree t;
8120 tree ptype;
8121 int hashcode = 0;
8122 bool any_structural_p, any_noncanonical_p;
8123 tree canon_argtypes;
8124
8125 /* Make a node of the sort we want. */
8126 t = make_node (METHOD_TYPE);
8127
8128 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8129 TREE_TYPE (t) = rettype;
8130 ptype = build_pointer_type (basetype);
8131
8132 /* The actual arglist for this function includes a "hidden" argument
8133 which is "this". Put it into the list of argument types. */
8134 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8135 TYPE_ARG_TYPES (t) = argtypes;
8136
8137 /* If we already have such a type, use the old one. */
8138 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8139 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8140 hashcode = type_hash_list (argtypes, hashcode);
8141 t = type_hash_canon (hashcode, t);
8142
8143 /* Set up the canonical type. */
8144 any_structural_p
8145 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8146 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8147 any_noncanonical_p
8148 = (TYPE_CANONICAL (basetype) != basetype
8149 || TYPE_CANONICAL (rettype) != rettype);
8150 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8151 &any_structural_p,
8152 &any_noncanonical_p);
8153 if (any_structural_p)
8154 SET_TYPE_STRUCTURAL_EQUALITY (t);
8155 else if (any_noncanonical_p)
8156 TYPE_CANONICAL (t)
8157 = build_method_type_directly (TYPE_CANONICAL (basetype),
8158 TYPE_CANONICAL (rettype),
8159 canon_argtypes);
8160 if (!COMPLETE_TYPE_P (t))
8161 layout_type (t);
8162
8163 return t;
8164 }
8165
8166 /* Construct, lay out and return the type of methods belonging to class
8167 BASETYPE and whose arguments and values are described by TYPE.
8168 If that type exists already, reuse it.
8169 TYPE must be a FUNCTION_TYPE node. */
8170
8171 tree
8172 build_method_type (tree basetype, tree type)
8173 {
8174 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8175
8176 return build_method_type_directly (basetype,
8177 TREE_TYPE (type),
8178 TYPE_ARG_TYPES (type));
8179 }
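/* Illustrative sketch (hypothetical C++ front-end code, where class_type
   stands for some RECORD_TYPE): the METHOD_TYPE of a member function
   `int f (double)' of that class could be built as

     tree argtypes = tree_cons (NULL_TREE, double_type_node,
                                void_list_node);
     tree mtype = build_method_type_directly (class_type,
                                              integer_type_node,
                                              argtypes);

   The implicit `this' parameter is prepended by
   build_method_type_directly itself.  */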
8180
8181 /* Construct, lay out and return the type of offsets to a value
8182 of type TYPE, within an object of type BASETYPE.
8183 If a suitable offset type exists already, reuse it. */
8184
8185 tree
8186 build_offset_type (tree basetype, tree type)
8187 {
8188 tree t;
8189 hashval_t hashcode = 0;
8190
8191 /* Make a node of the sort we want. */
8192 t = make_node (OFFSET_TYPE);
8193
8194 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8195 TREE_TYPE (t) = type;
8196
8197 /* If we already have such a type, use the old one. */
8198 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8199 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8200 t = type_hash_canon (hashcode, t);
8201
8202 if (!COMPLETE_TYPE_P (t))
8203 layout_type (t);
8204
8205 if (TYPE_CANONICAL (t) == t)
8206 {
8207 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8208 || TYPE_STRUCTURAL_EQUALITY_P (type))
8209 SET_TYPE_STRUCTURAL_EQUALITY (t);
8210 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8211 || TYPE_CANONICAL (type) != type)
8212 TYPE_CANONICAL (t)
8213 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8214 TYPE_CANONICAL (type));
8215 }
8216
8217 return t;
8218 }
8219
8220 /* Create a complex type whose components are COMPONENT_TYPE. */
8221
8222 tree
8223 build_complex_type (tree component_type)
8224 {
8225 tree t;
8226 hashval_t hashcode;
8227
8228 gcc_assert (INTEGRAL_TYPE_P (component_type)
8229 || SCALAR_FLOAT_TYPE_P (component_type)
8230 || FIXED_POINT_TYPE_P (component_type));
8231
8232 /* Make a node of the sort we want. */
8233 t = make_node (COMPLEX_TYPE);
8234
8235 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8236
8237 /* If we already have such a type, use the old one. */
8238 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8239 t = type_hash_canon (hashcode, t);
8240
8241 if (!COMPLETE_TYPE_P (t))
8242 layout_type (t);
8243
8244 if (TYPE_CANONICAL (t) == t)
8245 {
8246 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8247 SET_TYPE_STRUCTURAL_EQUALITY (t);
8248 else if (TYPE_CANONICAL (component_type) != component_type)
8249 TYPE_CANONICAL (t)
8250 = build_complex_type (TYPE_CANONICAL (component_type));
8251 }
8252
8253 /* We need to create a name, since complex is a fundamental type. */
8254 if (! TYPE_NAME (t))
8255 {
8256 const char *name;
8257 if (component_type == char_type_node)
8258 name = "complex char";
8259 else if (component_type == signed_char_type_node)
8260 name = "complex signed char";
8261 else if (component_type == unsigned_char_type_node)
8262 name = "complex unsigned char";
8263 else if (component_type == short_integer_type_node)
8264 name = "complex short int";
8265 else if (component_type == short_unsigned_type_node)
8266 name = "complex short unsigned int";
8267 else if (component_type == integer_type_node)
8268 name = "complex int";
8269 else if (component_type == unsigned_type_node)
8270 name = "complex unsigned int";
8271 else if (component_type == long_integer_type_node)
8272 name = "complex long int";
8273 else if (component_type == long_unsigned_type_node)
8274 name = "complex long unsigned int";
8275 else if (component_type == long_long_integer_type_node)
8276 name = "complex long long int";
8277 else if (component_type == long_long_unsigned_type_node)
8278 name = "complex long long unsigned int";
8279 else
8280 name = 0;
8281
8282 if (name != 0)
8283 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8284 get_identifier (name), t);
8285 }
8286
8287 return build_qualified_type (t, TYPE_QUALS (component_type));
8288 }
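/* Illustrative note (hypothetical caller): building

     tree c = build_complex_type (double_type_node);

   normally yields the node already registered in the type hash table,
   i.e. the same node the front ends use for `_Complex double'.  */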
8289
8290 /* If TYPE is a real or complex floating-point type and the target
8291 does not directly support arithmetic on TYPE then return the wider
8292 type to be used for arithmetic on TYPE. Otherwise, return
8293 NULL_TREE. */
8294
8295 tree
8296 excess_precision_type (tree type)
8297 {
8298 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8299 {
8300 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8301 switch (TREE_CODE (type))
8302 {
8303 case REAL_TYPE:
8304 switch (flt_eval_method)
8305 {
8306 case 1:
8307 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8308 return double_type_node;
8309 break;
8310 case 2:
8311 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8312 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8313 return long_double_type_node;
8314 break;
8315 default:
8316 gcc_unreachable ();
8317 }
8318 break;
8319 case COMPLEX_TYPE:
8320 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8321 return NULL_TREE;
8322 switch (flt_eval_method)
8323 {
8324 case 1:
8325 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8326 return complex_double_type_node;
8327 break;
8328 case 2:
8329 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8330 || (TYPE_MODE (TREE_TYPE (type))
8331 == TYPE_MODE (double_type_node)))
8332 return complex_long_double_type_node;
8333 break;
8334 default:
8335 gcc_unreachable ();
8336 }
8337 break;
8338 default:
8339 break;
8340 }
8341 }
8342 return NULL_TREE;
8343 }
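/* The flt_eval_method cases above follow the C99 FLT_EVAL_METHOD
   convention: 1 means float arithmetic is carried out in double,
   2 means float and double arithmetic are carried out in long double,
   so the corresponding wider (possibly complex) type is returned.  */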
8344 \f
8345 /* Return OP, stripped of any conversions to wider types as much as is safe.
8346 Converting the value back to OP's type makes a value equivalent to OP.
8347
8348 If FOR_TYPE is nonzero, we return a value which, if converted to
8349 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8350
8351 OP must have integer, real or enumeral type. Pointers are not allowed!
8352
8353 There are some cases where the obvious value we could return
8354 would regenerate to OP if converted to OP's type,
8355 but would not extend like OP to wider types.
8356 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8357 For example, if OP is (unsigned short)(signed char)-1,
8358 we avoid returning (signed char)-1 if FOR_TYPE is int,
8359 even though extending that to an unsigned short would regenerate OP,
8360 since the result of extending (signed char)-1 to (int)
8361 is different from (int) OP. */
8362
8363 tree
8364 get_unwidened (tree op, tree for_type)
8365 {
8366 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8367 tree type = TREE_TYPE (op);
8368 unsigned final_prec
8369 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8370 int uns
8371 = (for_type != 0 && for_type != type
8372 && final_prec > TYPE_PRECISION (type)
8373 && TYPE_UNSIGNED (type));
8374 tree win = op;
8375
8376 while (CONVERT_EXPR_P (op))
8377 {
8378 int bitschange;
8379
8380 /* TYPE_PRECISION on vector types has different meaning
8381 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8382 so avoid them here. */
8383 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8384 break;
8385
8386 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8387 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8388
8389 /* Truncations are many-one so cannot be removed.
8390 Unless we are later going to truncate down even farther. */
8391 if (bitschange < 0
8392 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8393 break;
8394
8395 /* See what's inside this conversion. If we decide to strip it,
8396 we will set WIN. */
8397 op = TREE_OPERAND (op, 0);
8398
8399 /* If we have not stripped any zero-extensions (uns is 0),
8400 we can strip any kind of extension.
8401 If we have previously stripped a zero-extension,
8402 only zero-extensions can safely be stripped.
8403 Any extension can be stripped if the bits it would produce
8404 are all going to be discarded later by truncating to FOR_TYPE. */
8405
8406 if (bitschange > 0)
8407 {
8408 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8409 win = op;
8410 /* TYPE_UNSIGNED says whether this is a zero-extension.
8411 Let's avoid computing it if it does not affect WIN
8412 and if UNS will not be needed again. */
8413 if ((uns
8414 || CONVERT_EXPR_P (op))
8415 && TYPE_UNSIGNED (TREE_TYPE (op)))
8416 {
8417 uns = 1;
8418 win = op;
8419 }
8420 }
8421 }
8422
8423 /* If we finally reach a constant see if it fits in for_type and
8424 in that case convert it. */
8425 if (for_type
8426 && TREE_CODE (win) == INTEGER_CST
8427 && TREE_TYPE (win) != for_type
8428 && int_fits_type_p (win, for_type))
8429 win = fold_convert (for_type, win);
8430
8431 return win;
8432 }
8433 \f
8434 /* Return OP or a simpler expression for a narrower value
8435 which can be sign-extended or zero-extended to give back OP.
8436 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8437 or 0 if the value should be sign-extended. */
8438
8439 tree
8440 get_narrower (tree op, int *unsignedp_ptr)
8441 {
8442 int uns = 0;
8443 int first = 1;
8444 tree win = op;
8445 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8446
8447 while (TREE_CODE (op) == NOP_EXPR)
8448 {
8449 int bitschange
8450 = (TYPE_PRECISION (TREE_TYPE (op))
8451 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8452
8453 /* Truncations are many-one so cannot be removed. */
8454 if (bitschange < 0)
8455 break;
8456
8457 /* See what's inside this conversion. If we decide to strip it,
8458 we will set WIN. */
8459
8460 if (bitschange > 0)
8461 {
8462 op = TREE_OPERAND (op, 0);
8463 /* An extension: the outermost one can be stripped,
8464 but remember whether it is zero or sign extension. */
8465 if (first)
8466 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8467 /* Otherwise, if a sign extension has been stripped,
8468 only sign extensions can now be stripped;
8469 if a zero extension has been stripped, only zero-extensions. */
8470 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8471 break;
8472 first = 0;
8473 }
8474 else /* bitschange == 0 */
8475 {
8476 /* A change in nominal type can always be stripped, but we must
8477 preserve the unsignedness. */
8478 if (first)
8479 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8480 first = 0;
8481 op = TREE_OPERAND (op, 0);
8482 /* Keep trying to narrow, but don't assign op to win if it
8483 would turn an integral type into something else. */
8484 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8485 continue;
8486 }
8487
8488 win = op;
8489 }
8490
8491 if (TREE_CODE (op) == COMPONENT_REF
8492 /* Since type_for_size always gives an integer type. */
8493 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8494 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8495 /* Ensure field is laid out already. */
8496 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8497 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8498 {
8499 unsigned HOST_WIDE_INT innerprec
8500 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8501 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8502 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8503 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8504
8505 /* We can get this structure field in a narrower type that fits it,
8506 but the resulting extension to its nominal type (a fullword type)
8507 must satisfy the same conditions as for other extensions.
8508
8509 Do this only for fields that are aligned (not bit-fields),
8510 because when bit-field insns will be used there is no
8511 advantage in doing this. */
8512
8513 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8514 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8515 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8516 && type != 0)
8517 {
8518 if (first)
8519 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8520 win = fold_convert (type, op);
8521 }
8522 }
8523
8524 *unsignedp_ptr = uns;
8525 return win;
8526 }
8527 \f
8528 /* Returns true if integer constant C has a value that is permissible
8529 for type TYPE (an INTEGER_TYPE). */
8530
8531 bool
8532 int_fits_type_p (const_tree c, const_tree type)
8533 {
8534 tree type_low_bound, type_high_bound;
8535 bool ok_for_low_bound, ok_for_high_bound;
8536 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8537
8538 retry:
8539 type_low_bound = TYPE_MIN_VALUE (type);
8540 type_high_bound = TYPE_MAX_VALUE (type);
8541
8542 /* If at least one bound of the type is a constant integer, we can check
8543 ourselves and maybe make a decision. If no such decision is possible, but
8544 this type is a subtype, try checking against that. Otherwise, use
8545 fits_to_tree_p, which checks against the precision.
8546
8547 Compute the status for each possibly constant bound, and return if we see
8548 one does not match. Use ok_for_xxx_bound to record whether the
8549 corresponding bound is a constant that the value is known to satisfy;
8550 if a bound is not constant, the flag stays false. */
8551
8552 /* Check if c >= type_low_bound. */
8553 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8554 {
8555 if (INT_CST_LT (c, type_low_bound))
8556 return false;
8557 ok_for_low_bound = true;
8558 }
8559 else
8560 ok_for_low_bound = false;
8561
8562 /* Check if c <= type_high_bound. */
8563 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8564 {
8565 if (INT_CST_LT (type_high_bound, c))
8566 return false;
8567 ok_for_high_bound = true;
8568 }
8569 else
8570 ok_for_high_bound = false;
8571
8572 /* If the constant fits both bounds, the result is known. */
8573 if (ok_for_low_bound && ok_for_high_bound)
8574 return true;
8575
8576 /* Perform some generic filtering which may allow making a decision
8577 even if the bounds are not constant. First, negative integers
8578 never fit in unsigned types. */
8579 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8580 return false;
8581
8582 /* Second, narrower types always fit in wider ones. */
8583 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8584 return true;
8585
8586 /* Third, unsigned integers with top bit set never fit signed types. */
8587 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8588 {
8589 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8590 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8591 {
8592 /* When a tree_cst is converted to a wide-int, the precision
8593 is taken from the type. However, if the precision of the
8594 mode underneath the type is smaller than that, it is
8595 possible that the value will not fit. The test below
8596 fails if any bit is set between the sign bit of the
8597 underlying mode and the top bit of the type. */
8598 if (wi::ne_p (wi::zext (c, prec - 1), c))
8599 return false;
8600 }
8601 else if (wi::neg_p (c))
8602 return false;
8603 }
8604
8605 /* If we haven't been able to decide at this point, there is nothing more we
8606 can check ourselves here. Look at the base type if we have one and it
8607 has the same precision. */
8608 if (TREE_CODE (type) == INTEGER_TYPE
8609 && TREE_TYPE (type) != 0
8610 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8611 {
8612 type = TREE_TYPE (type);
8613 goto retry;
8614 }
8615
8616 /* Or to fits_to_tree_p, if nothing else. */
8617 return wi::fits_to_tree_p (c, type);
8618 }
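/* Illustrative sketch (hypothetical caller): with the usual 8-bit
   unsigned char,

     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)

   returns false, since 300 exceeds the constant high bound 255, while
   the same call with the value 200 returns true.  */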
8619
8620 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8621 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8622 represented (assuming two's-complement arithmetic) within the bit
8623 precision of the type are returned instead. */
8624
8625 void
8626 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8627 {
8628 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8629 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8630 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8631 else
8632 {
8633 if (TYPE_UNSIGNED (type))
8634 mpz_set_ui (min, 0);
8635 else
8636 {
8637 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8638 wi::to_mpz (mn, min, SIGNED);
8639 }
8640 }
8641
8642 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8643 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8644 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8645 else
8646 {
8647 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8648 wi::to_mpz (mn, max, TYPE_SIGN (type));
8649 }
8650 }
8651
8652 /* Return true if VAR is an automatic variable defined in function FN. */
8653
8654 bool
8655 auto_var_in_fn_p (const_tree var, const_tree fn)
8656 {
8657 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8658 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8659 || TREE_CODE (var) == PARM_DECL)
8660 && ! TREE_STATIC (var))
8661 || TREE_CODE (var) == LABEL_DECL
8662 || TREE_CODE (var) == RESULT_DECL));
8663 }
8664
8665 /* Subprogram of following function. Called by walk_tree.
8666
8667 Return *TP if it is an automatic variable or parameter of the
8668 function passed in as DATA. */
8669
8670 static tree
8671 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8672 {
8673 tree fn = (tree) data;
8674
8675 if (TYPE_P (*tp))
8676 *walk_subtrees = 0;
8677
8678 else if (DECL_P (*tp)
8679 && auto_var_in_fn_p (*tp, fn))
8680 return *tp;
8681
8682 return NULL_TREE;
8683 }
8684
8685 /* Returns true if T is, contains, or refers to a type with variable
8686 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8687 arguments, but not the return type. If FN is nonzero, only return
8688 true if a modifier of the type or position of FN is a variable or
8689 parameter inside FN.
8690
8691 This concept is more general than that of C99 'variably modified types':
8692 in C99, a struct type is never variably modified because a VLA may not
8693 appear as a structure member. However, in GNU C, code like:
8694
8695 struct S { int i[f()]; };
8696
8697 is valid, and other languages may define similar constructs. */
8698
8699 bool
8700 variably_modified_type_p (tree type, tree fn)
8701 {
8702 tree t;
8703
8704 /* Test if T is either variable (if FN is zero) or an expression containing
8705 a variable in FN. If TYPE isn't gimplified, return true also if
8706 gimplify_one_sizepos would gimplify the expression into a local
8707 variable. */
8708 #define RETURN_TRUE_IF_VAR(T) \
8709 do { tree _t = (T); \
8710 if (_t != NULL_TREE \
8711 && _t != error_mark_node \
8712 && TREE_CODE (_t) != INTEGER_CST \
8713 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8714 && (!fn \
8715 || (!TYPE_SIZES_GIMPLIFIED (type) \
8716 && !is_gimple_sizepos (_t)) \
8717 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8718 return true; } while (0)
8719
8720 if (type == error_mark_node)
8721 return false;
8722
8723 /* If TYPE itself has variable size, it is variably modified. */
8724 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8725 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8726
8727 switch (TREE_CODE (type))
8728 {
8729 case POINTER_TYPE:
8730 case REFERENCE_TYPE:
8731 case VECTOR_TYPE:
8732 if (variably_modified_type_p (TREE_TYPE (type), fn))
8733 return true;
8734 break;
8735
8736 case FUNCTION_TYPE:
8737 case METHOD_TYPE:
8738 /* If TYPE is a function type, it is variably modified if the
8739 return type is variably modified. */
8740 if (variably_modified_type_p (TREE_TYPE (type), fn))
8741 return true;
8742 break;
8743
8744 case INTEGER_TYPE:
8745 case REAL_TYPE:
8746 case FIXED_POINT_TYPE:
8747 case ENUMERAL_TYPE:
8748 case BOOLEAN_TYPE:
8749 /* Scalar types are variably modified if their end points
8750 aren't constant. */
8751 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8752 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8753 break;
8754
8755 case RECORD_TYPE:
8756 case UNION_TYPE:
8757 case QUAL_UNION_TYPE:
8758 /* We can't see if any of the fields are variably-modified by the
8759 definition we normally use, since that would produce infinite
8760 recursion via pointers. */
8761 /* This is variably modified if some field's type is. */
8762 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8763 if (TREE_CODE (t) == FIELD_DECL)
8764 {
8765 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8766 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8767 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8768
8769 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8770 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8771 }
8772 break;
8773
8774 case ARRAY_TYPE:
8775 /* Do not call ourselves to avoid infinite recursion. This is
8776 variably modified if the element type is. */
8777 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8778 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8779 break;
8780
8781 default:
8782 break;
8783 }
8784
8785 /* The current language may have other cases to check, but in general,
8786 all other types are not variably modified. */
8787 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8788
8789 #undef RETURN_TRUE_IF_VAR
8790 }
8791
8792 /* Given a DECL or TYPE, return the scope in which it was declared, or
8793 NULL_TREE if there is no containing scope. */
8794
8795 tree
8796 get_containing_scope (const_tree t)
8797 {
8798 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8799 }
8800
8801 /* Return the innermost context enclosing DECL that is
8802 a FUNCTION_DECL, or zero if none. */
8803
8804 tree
8805 decl_function_context (const_tree decl)
8806 {
8807 tree context;
8808
8809 if (TREE_CODE (decl) == ERROR_MARK)
8810 return 0;
8811
8812 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8813 where we look up the function at runtime. Such functions always take
8814 a first argument of type 'pointer to real context'.
8815
8816 C++ should really be fixed to use DECL_CONTEXT for the real context,
8817 and use something else for the "virtual context". */
8818 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8819 context
8820 = TYPE_MAIN_VARIANT
8821 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8822 else
8823 context = DECL_CONTEXT (decl);
8824
8825 while (context && TREE_CODE (context) != FUNCTION_DECL)
8826 {
8827 if (TREE_CODE (context) == BLOCK)
8828 context = BLOCK_SUPERCONTEXT (context);
8829 else
8830 context = get_containing_scope (context);
8831 }
8832
8833 return context;
8834 }
8835
8836 /* Return the innermost context enclosing DECL that is
8837 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8838 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8839
8840 tree
8841 decl_type_context (const_tree decl)
8842 {
8843 tree context = DECL_CONTEXT (decl);
8844
8845 while (context)
8846 switch (TREE_CODE (context))
8847 {
8848 case NAMESPACE_DECL:
8849 case TRANSLATION_UNIT_DECL:
8850 return NULL_TREE;
8851
8852 case RECORD_TYPE:
8853 case UNION_TYPE:
8854 case QUAL_UNION_TYPE:
8855 return context;
8856
8857 case TYPE_DECL:
8858 case FUNCTION_DECL:
8859 context = DECL_CONTEXT (context);
8860 break;
8861
8862 case BLOCK:
8863 context = BLOCK_SUPERCONTEXT (context);
8864 break;
8865
8866 default:
8867 gcc_unreachable ();
8868 }
8869
8870 return NULL_TREE;
8871 }
8872
8873 /* CALL is a CALL_EXPR. Return the declaration for the function
8874 called, or NULL_TREE if the called function cannot be
8875 determined. */
8876
8877 tree
8878 get_callee_fndecl (const_tree call)
8879 {
8880 tree addr;
8881
8882 if (call == error_mark_node)
8883 return error_mark_node;
8884
8885 /* It's invalid to call this function with anything but a
8886 CALL_EXPR. */
8887 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8888
8889 /* The first operand to the CALL is the address of the function
8890 called. */
8891 addr = CALL_EXPR_FN (call);
8892
8893 STRIP_NOPS (addr);
8894
8895 /* If this is a readonly function pointer, extract its initial value. */
8896 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8897 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8898 && DECL_INITIAL (addr))
8899 addr = DECL_INITIAL (addr);
8900
8901 /* If the address is just `&f' for some function `f', then we know
8902 that `f' is being called. */
8903 if (TREE_CODE (addr) == ADDR_EXPR
8904 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8905 return TREE_OPERAND (addr, 0);
8906
8907 /* We couldn't figure out what was being called. */
8908 return NULL_TREE;
8909 }
8910
8911 /* Print debugging information about tree nodes generated during the compile,
8912 and any language-specific information. */
8913
8914 void
8915 dump_tree_statistics (void)
8916 {
8917 if (GATHER_STATISTICS)
8918 {
8919 int i;
8920 int total_nodes, total_bytes;
8921 fprintf (stderr, "Kind Nodes Bytes\n");
8922 fprintf (stderr, "---------------------------------------\n");
8923 total_nodes = total_bytes = 0;
8924 for (i = 0; i < (int) all_kinds; i++)
8925 {
8926 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8927 tree_node_counts[i], tree_node_sizes[i]);
8928 total_nodes += tree_node_counts[i];
8929 total_bytes += tree_node_sizes[i];
8930 }
8931 fprintf (stderr, "---------------------------------------\n");
8932 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8933 fprintf (stderr, "---------------------------------------\n");
8934 fprintf (stderr, "Code Nodes\n");
8935 fprintf (stderr, "----------------------------\n");
8936 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8937 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8938 tree_code_counts[i]);
8939 fprintf (stderr, "----------------------------\n");
8940 ssanames_print_statistics ();
8941 phinodes_print_statistics ();
8942 }
8943 else
8944 fprintf (stderr, "(No per-node statistics)\n");
8945
8946 print_type_hash_statistics ();
8947 print_debug_expr_statistics ();
8948 print_value_expr_statistics ();
8949 lang_hooks.print_statistics ();
8950 }
8951 \f
8952 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8953
8954 /* Generate a crc32 of a byte. */
8955
8956 static unsigned
8957 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8958 {
8959 unsigned ix;
8960
8961 for (ix = bits; ix--; value <<= 1)
8962 {
8963 unsigned feedback;
8964
8965 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
8966 chksum <<= 1;
8967 chksum ^= feedback;
8968 }
8969 return chksum;
8970 }
8971
8972 /* Generate a crc32 of a 32-bit unsigned. */
8973
8974 unsigned
8975 crc32_unsigned (unsigned chksum, unsigned value)
8976 {
8977 return crc32_unsigned_bits (chksum, value, 32);
8978 }
8979
8980 /* Generate a crc32 of a byte. */
8981
8982 unsigned
8983 crc32_byte (unsigned chksum, char byte)
8984 {
8985 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
8986 }
8987
8988 /* Generate a crc32 of a string. */
8989
8990 unsigned
8991 crc32_string (unsigned chksum, const char *string)
8992 {
8993 do
8994 {
8995 chksum = crc32_byte (chksum, *string);
8996 }
8997 while (*string++);
8998 return chksum;
8999 }
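/* Note that the do/while loop above hashes the terminating NUL byte as
   well, so even crc32_string (chksum, "") advances the checksum by one
   zero byte before returning.  */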
9000
9001 /* P is a string that will be used in a symbol. Mask out any characters
9002 that are not valid in that context. */
9003
9004 void
9005 clean_symbol_name (char *p)
9006 {
9007 for (; *p; p++)
9008 if (! (ISALNUM (*p)
9009 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9010 || *p == '$'
9011 #endif
9012 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9013 || *p == '.'
9014 #endif
9015 ))
9016 *p = '_';
9017 }
9018
9019 /* Generate a name for a special-purpose function.
9020 The generated name may need to be unique across the whole link.
9021 Changes to this function may also require corresponding changes to
9022 xstrdup_mask_random.
9023 TYPE is some string to identify the purpose of this function to the
9024 linker or collect2; it must start with an uppercase letter,
9025 one of:
9026 I - for constructors
9027 D - for destructors
9028 N - for C++ anonymous namespaces
9029 F - for DWARF unwind frame information. */
9030
9031 tree
9032 get_file_function_name (const char *type)
9033 {
9034 char *buf;
9035 const char *p;
9036 char *q;
9037
9038 /* If we already have a name we know to be unique, just use that. */
9039 if (first_global_object_name)
9040 p = q = ASTRDUP (first_global_object_name);
9041 /* If the target is handling the constructors/destructors, they
9042 will be local to this file and the name is only necessary for
9043 debugging purposes.
9044 We also assign sub_I and sub_D suffixes to constructors called from
9045 the global static constructors. These are always local. */
9046 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9047 || (strncmp (type, "sub_", 4) == 0
9048 && (type[4] == 'I' || type[4] == 'D')))
9049 {
9050 const char *file = main_input_filename;
9051 if (! file)
9052 file = input_filename;
9053 /* Just use the file's basename, because the full pathname
9054 might be quite long. */
9055 p = q = ASTRDUP (lbasename (file));
9056 }
9057 else
9058 {
9059 /* Otherwise, the name must be unique across the entire link.
9060 We don't have anything that we know to be unique to this translation
9061 unit, so use what we do have and throw in some randomness. */
9062 unsigned len;
9063 const char *name = weak_global_object_name;
9064 const char *file = main_input_filename;
9065
9066 if (! name)
9067 name = "";
9068 if (! file)
9069 file = input_filename;
9070
9071 len = strlen (file);
9072 q = (char *) alloca (9 + 17 + len + 1);
9073 memcpy (q, file, len + 1);
9074
9075 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9076 crc32_string (0, name), get_random_seed (false));
9077
9078 p = q;
9079 }
9080
9081 clean_symbol_name (q);
9082 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9083 + strlen (type));
9084
9085 /* Set up the name of the file-level functions we may need.
9086 Use a global object (which is already required to be unique over
9087 the program) rather than the file name (which imposes extra
9088 constraints). */
9089 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9090
9091 return get_identifier (buf);
9092 }
9093 \f
9094 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9095
9096 /* Complain that the tree code of NODE does not match the expected 0
9097 terminated list of trailing codes. The trailing code list can be
9098 empty, for a more vague error message. FILE, LINE, and FUNCTION
9099 are of the caller. */
9100
9101 void
9102 tree_check_failed (const_tree node, const char *file,
9103 int line, const char *function, ...)
9104 {
9105 va_list args;
9106 const char *buffer;
9107 unsigned length = 0;
9108 enum tree_code code;
9109
9110 va_start (args, function);
9111 while ((code = (enum tree_code) va_arg (args, int)))
9112 length += 4 + strlen (get_tree_code_name (code));
9113 va_end (args);
9114 if (length)
9115 {
9116 char *tmp;
9117 va_start (args, function);
9118 length += strlen ("expected ");
9119 buffer = tmp = (char *) alloca (length);
9120 length = 0;
9121 while ((code = (enum tree_code) va_arg (args, int)))
9122 {
9123 const char *prefix = length ? " or " : "expected ";
9124
9125 strcpy (tmp + length, prefix);
9126 length += strlen (prefix);
9127 strcpy (tmp + length, get_tree_code_name (code));
9128 length += strlen (get_tree_code_name (code));
9129 }
9130 va_end (args);
9131 }
9132 else
9133 buffer = "unexpected node";
9134
9135 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9136 buffer, get_tree_code_name (TREE_CODE (node)),
9137 function, trim_filename (file), line);
9138 }
9139
9140 /* Complain that the tree code of NODE matches one of the 0 terminated
9141 list of trailing codes, i.e. a code it is required not to have.
9142 FILE, LINE, and FUNCTION are of the caller. */
9143
9144 void
9145 tree_not_check_failed (const_tree node, const char *file,
9146 int line, const char *function, ...)
9147 {
9148 va_list args;
9149 char *buffer;
9150 unsigned length = 0;
9151 enum tree_code code;
9152
9153 va_start (args, function);
9154 while ((code = (enum tree_code) va_arg (args, int)))
9155 length += 4 + strlen (get_tree_code_name (code));
9156 va_end (args);
9157 va_start (args, function);
9158 buffer = (char *) alloca (length);
9159 length = 0;
9160 while ((code = (enum tree_code) va_arg (args, int)))
9161 {
9162 if (length)
9163 {
9164 strcpy (buffer + length, " or ");
9165 length += 4;
9166 }
9167 strcpy (buffer + length, get_tree_code_name (code));
9168 length += strlen (get_tree_code_name (code));
9169 }
9170 va_end (args);
9171
9172 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9173 buffer, get_tree_code_name (TREE_CODE (node)),
9174 function, trim_filename (file), line);
9175 }
9176
9177 /* Similar to tree_check_failed, except that we check for a class of tree
9178 code, given in CL. */
9179
9180 void
9181 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9182 const char *file, int line, const char *function)
9183 {
9184 internal_error
9185 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9186 TREE_CODE_CLASS_STRING (cl),
9187 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9188 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9189 }
9190
9191 /* Similar to tree_check_failed, except that instead of specifying a
9192 dozen codes, use the knowledge that they're all sequential. */
9193
9194 void
9195 tree_range_check_failed (const_tree node, const char *file, int line,
9196 const char *function, enum tree_code c1,
9197 enum tree_code c2)
9198 {
9199 char *buffer;
9200 unsigned length = 0;
9201 unsigned int c;
9202
9203 for (c = c1; c <= c2; ++c)
9204 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9205
9206 length += strlen ("expected ");
9207 buffer = (char *) alloca (length);
9208 length = 0;
9209
9210 for (c = c1; c <= c2; ++c)
9211 {
9212 const char *prefix = length ? " or " : "expected ";
9213
9214 strcpy (buffer + length, prefix);
9215 length += strlen (prefix);
9216 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9217 length += strlen (get_tree_code_name ((enum tree_code) c));
9218 }
9219
9220 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9221 buffer, get_tree_code_name (TREE_CODE (node)),
9222 function, trim_filename (file), line);
9223 }
9224
9225
9226 /* Similar to tree_check_failed, except that we check that a tree does
9227 not have the specified code, given in CL. */
9228
9229 void
9230 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9231 const char *file, int line, const char *function)
9232 {
9233 internal_error
9234 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9235 TREE_CODE_CLASS_STRING (cl),
9236 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9237 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9238 }
9239
9240
9241 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9242
9243 void
9244 omp_clause_check_failed (const_tree node, const char *file, int line,
9245 const char *function, enum omp_clause_code code)
9246 {
9247 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9248 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9249 function, trim_filename (file), line);
9250 }
9251
9252
9253 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9254
9255 void
9256 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9257 const char *function, enum omp_clause_code c1,
9258 enum omp_clause_code c2)
9259 {
9260 char *buffer;
9261 unsigned length = 0;
9262 unsigned int c;
9263
9264 for (c = c1; c <= c2; ++c)
9265 length += 4 + strlen (omp_clause_code_name[c]);
9266
9267 length += strlen ("expected ");
9268 buffer = (char *) alloca (length);
9269 length = 0;
9270
9271 for (c = c1; c <= c2; ++c)
9272 {
9273 const char *prefix = length ? " or " : "expected ";
9274
9275 strcpy (buffer + length, prefix);
9276 length += strlen (prefix);
9277 strcpy (buffer + length, omp_clause_code_name[c]);
9278 length += strlen (omp_clause_code_name[c]);
9279 }
9280
9281 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9282 buffer, omp_clause_code_name[TREE_CODE (node)],
9283 function, trim_filename (file), line);
9284 }
9285
9286
9287 #undef DEFTREESTRUCT
9288 #define DEFTREESTRUCT(VAL, NAME) NAME,
9289
9290 static const char *ts_enum_names[] = {
9291 #include "treestruct.def"
9292 };
9293 #undef DEFTREESTRUCT
9294
9295 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9296
9297 /* Similar to tree_class_check_failed, except that we check for
9298 whether CODE contains the tree structure identified by EN. */
9299
9300 void
9301 tree_contains_struct_check_failed (const_tree node,
9302 const enum tree_node_structure_enum en,
9303 const char *file, int line,
9304 const char *function)
9305 {
9306 internal_error
9307 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9308 TS_ENUM_NAME (en),
9309 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9310 }
9311
9312
9313 /* Similar to above, except that the check is for the bounds of a
9314 tree_int_cst's (dynamically sized) vector of elements. */
9315
9316 void
9317 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9318 const char *function)
9319 {
9320 internal_error
9321 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9322 idx + 1, len, function, trim_filename (file), line);
9323 }
9324
9325 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9326 (dynamically sized) vector. */
9327
9328 void
9329 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9330 const char *function)
9331 {
9332 internal_error
9333 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9334 idx + 1, len, function, trim_filename (file), line);
9335 }
9336
9337 /* Similar to above, except that the check is for the bounds of the operand
9338 vector of an expression node EXP. */
9339
9340 void
9341 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9342 int line, const char *function)
9343 {
9344 enum tree_code code = TREE_CODE (exp);
9345 internal_error
9346 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9347 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9348 function, trim_filename (file), line);
9349 }
9350
9351 /* Similar to above, except that the check is for the number of
9352 operands of an OMP_CLAUSE node. */
9353
9354 void
9355 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9356 int line, const char *function)
9357 {
9358 internal_error
9359 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9360 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9361 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9362 trim_filename (file), line);
9363 }
9364 #endif /* ENABLE_TREE_CHECKING */
9365 \f
9366 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9367 and mapped to the machine mode MODE. Initialize its fields and build
9368 the information necessary for debugging output. */
9369
9370 static tree
9371 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9372 {
9373 tree t;
9374 hashval_t hashcode = 0;
9375
9376 t = make_node (VECTOR_TYPE);
9377 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9378 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9379 SET_TYPE_MODE (t, mode);
9380
9381 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9382 SET_TYPE_STRUCTURAL_EQUALITY (t);
9383 else if (TYPE_CANONICAL (innertype) != innertype
9384 || mode != VOIDmode)
9385 TYPE_CANONICAL (t)
9386 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9387
9388 layout_type (t);
9389
9390 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9391 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9392 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9393 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9394 t = type_hash_canon (hashcode, t);
9395
9396 /* We have built a main variant, based on the main variant of the
9397 inner type. Use it to build the variant we return. */
9398 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9399 && TREE_TYPE (t) != innertype)
9400 return build_type_attribute_qual_variant (t,
9401 TYPE_ATTRIBUTES (innertype),
9402 TYPE_QUALS (innertype));
9403
9404 return t;
9405 }
9406
9407 static tree
9408 make_or_reuse_type (unsigned size, int unsignedp)
9409 {
9410 if (size == INT_TYPE_SIZE)
9411 return unsignedp ? unsigned_type_node : integer_type_node;
9412 if (size == CHAR_TYPE_SIZE)
9413 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9414 if (size == SHORT_TYPE_SIZE)
9415 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9416 if (size == LONG_TYPE_SIZE)
9417 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9418 if (size == LONG_LONG_TYPE_SIZE)
9419 return (unsignedp ? long_long_unsigned_type_node
9420 : long_long_integer_type_node);
9421 if (size == 128 && int128_integer_type_node)
9422 return (unsignedp ? int128_unsigned_type_node
9423 : int128_integer_type_node);
9424
9425 if (unsignedp)
9426 return make_unsigned_type (size);
9427 else
9428 return make_signed_type (size);
9429 }
9430
9431 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9432
9433 static tree
9434 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9435 {
9436 if (satp)
9437 {
9438 if (size == SHORT_FRACT_TYPE_SIZE)
9439 return unsignedp ? sat_unsigned_short_fract_type_node
9440 : sat_short_fract_type_node;
9441 if (size == FRACT_TYPE_SIZE)
9442 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9443 if (size == LONG_FRACT_TYPE_SIZE)
9444 return unsignedp ? sat_unsigned_long_fract_type_node
9445 : sat_long_fract_type_node;
9446 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9447 return unsignedp ? sat_unsigned_long_long_fract_type_node
9448 : sat_long_long_fract_type_node;
9449 }
9450 else
9451 {
9452 if (size == SHORT_FRACT_TYPE_SIZE)
9453 return unsignedp ? unsigned_short_fract_type_node
9454 : short_fract_type_node;
9455 if (size == FRACT_TYPE_SIZE)
9456 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9457 if (size == LONG_FRACT_TYPE_SIZE)
9458 return unsignedp ? unsigned_long_fract_type_node
9459 : long_fract_type_node;
9460 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9461 return unsignedp ? unsigned_long_long_fract_type_node
9462 : long_long_fract_type_node;
9463 }
9464
9465 return make_fract_type (size, unsignedp, satp);
9466 }
9467
9468 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9469
9470 static tree
9471 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9472 {
9473 if (satp)
9474 {
9475 if (size == SHORT_ACCUM_TYPE_SIZE)
9476 return unsignedp ? sat_unsigned_short_accum_type_node
9477 : sat_short_accum_type_node;
9478 if (size == ACCUM_TYPE_SIZE)
9479 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9480 if (size == LONG_ACCUM_TYPE_SIZE)
9481 return unsignedp ? sat_unsigned_long_accum_type_node
9482 : sat_long_accum_type_node;
9483 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9484 return unsignedp ? sat_unsigned_long_long_accum_type_node
9485 : sat_long_long_accum_type_node;
9486 }
9487 else
9488 {
9489 if (size == SHORT_ACCUM_TYPE_SIZE)
9490 return unsignedp ? unsigned_short_accum_type_node
9491 : short_accum_type_node;
9492 if (size == ACCUM_TYPE_SIZE)
9493 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9494 if (size == LONG_ACCUM_TYPE_SIZE)
9495 return unsignedp ? unsigned_long_accum_type_node
9496 : long_accum_type_node;
9497 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9498 return unsignedp ? unsigned_long_long_accum_type_node
9499 : long_long_accum_type_node;
9500 }
9501
9502 return make_accum_type (size, unsignedp, satp);
9503 }
9504
9505
9506 /* Create an atomic variant node for TYPE. This routine is called
9507 during initialization of data types to create the 5 basic atomic
9508 types. The generic build_variant_type function requires these to
9509 already be set up in order to function properly, so cannot be
9510 called from there. */
9511
9512 static tree
9513 build_atomic_base (tree type)
9514 {
9515 tree t;
9516
9517 /* Make sure it's not already registered. */
9518 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9519 return t;
9520
9521 t = build_variant_type_copy (type);
9522 set_type_quals (t, TYPE_QUAL_ATOMIC);
9523
9524 return t;
9525 }
9526
9527 /* Create nodes for all integer types (and error_mark_node) using the sizes
9528 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9529 SHORT_DOUBLE specifies whether double should be of the same precision
9530 as float. */
9531
9532 void
9533 build_common_tree_nodes (bool signed_char, bool short_double)
9534 {
9535 error_mark_node = make_node (ERROR_MARK);
9536 TREE_TYPE (error_mark_node) = error_mark_node;
9537
9538 initialize_sizetypes ();
9539
9540 /* Define both `signed char' and `unsigned char'. */
9541 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9542 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9543 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9544 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9545
9546 /* Define `char', which is like either `signed char' or `unsigned char'
9547 but not the same as either. */
9548 char_type_node
9549 = (signed_char
9550 ? make_signed_type (CHAR_TYPE_SIZE)
9551 : make_unsigned_type (CHAR_TYPE_SIZE));
9552 TYPE_STRING_FLAG (char_type_node) = 1;
9553
9554 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9555 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9556 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9557 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9558 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9559 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9560 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9561 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9562 #if HOST_BITS_PER_WIDE_INT >= 64
9563 /* TODO: This isn't correct, but the logic at the moment depends on the
9564 host's wide integer rather than the target's.
9565 If there is a target that does not support TImode but has a 128-bit
9566 integer-scalar register, this target check needs to be adjusted. */
9567 if (targetm.scalar_mode_supported_p (TImode))
9568 {
9569 int128_integer_type_node = make_signed_type (128);
9570 int128_unsigned_type_node = make_unsigned_type (128);
9571 }
9572 #endif
9573
9574 /* Define a boolean type. This type only represents boolean values but
9575 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9576 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9577 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9578 TYPE_PRECISION (boolean_type_node) = 1;
9579 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9580
9581 /* Define what type to use for size_t. */
9582 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9583 size_type_node = unsigned_type_node;
9584 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9585 size_type_node = long_unsigned_type_node;
9586 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9587 size_type_node = long_long_unsigned_type_node;
9588 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9589 size_type_node = short_unsigned_type_node;
9590 else
9591 gcc_unreachable ();
9592
9593 /* Fill in the rest of the sized types. Reuse existing type nodes
9594 when possible. */
9595 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9596 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9597 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9598 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9599 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9600
9601 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9602 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9603 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9604 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9605 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9606
9607 /* Don't call build_qualified_type for atomics. That routine does
9608 special processing for atomics, and until they are initialized
9609 it's better not to make that call. */
9610
9611 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node);
9612 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node);
9613 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node);
9614 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node);
9615 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node);
9616
9617 access_public_node = get_identifier ("public");
9618 access_protected_node = get_identifier ("protected");
9619 access_private_node = get_identifier ("private");
9620
9621 /* Define these next since types below may use them. */
9622 integer_zero_node = build_int_cst (integer_type_node, 0);
9623 integer_one_node = build_int_cst (integer_type_node, 1);
9624 integer_three_node = build_int_cst (integer_type_node, 3);
9625 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9626
9627 size_zero_node = size_int (0);
9628 size_one_node = size_int (1);
9629 bitsize_zero_node = bitsize_int (0);
9630 bitsize_one_node = bitsize_int (1);
9631 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9632
9633 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9634 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9635
9636 void_type_node = make_node (VOID_TYPE);
9637 layout_type (void_type_node);
9638
9639 pointer_bounds_type_node = targetm.chkp_bound_type ();
9640
9641 /* We are not going to have real types in C with less than byte alignment,
9642 so we might as well not have any types that claim to have it. */
9643 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9644 TYPE_USER_ALIGN (void_type_node) = 0;
9645
9646 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9647 layout_type (TREE_TYPE (null_pointer_node));
9648
9649 ptr_type_node = build_pointer_type (void_type_node);
9650 const_ptr_type_node
9651 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9652 fileptr_type_node = ptr_type_node;
9653
9654 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9655
9656 float_type_node = make_node (REAL_TYPE);
9657 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9658 layout_type (float_type_node);
9659
9660 double_type_node = make_node (REAL_TYPE);
9661 if (short_double)
9662 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9663 else
9664 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9665 layout_type (double_type_node);
9666
9667 long_double_type_node = make_node (REAL_TYPE);
9668 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9669 layout_type (long_double_type_node);
9670
9671 float_ptr_type_node = build_pointer_type (float_type_node);
9672 double_ptr_type_node = build_pointer_type (double_type_node);
9673 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9674 integer_ptr_type_node = build_pointer_type (integer_type_node);
9675
9676 /* Fixed size integer types. */
9677 uint16_type_node = build_nonstandard_integer_type (16, true);
9678 uint32_type_node = build_nonstandard_integer_type (32, true);
9679 uint64_type_node = build_nonstandard_integer_type (64, true);
9680
9681 /* Decimal float types. */
9682 dfloat32_type_node = make_node (REAL_TYPE);
9683 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9684 layout_type (dfloat32_type_node);
9685 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9686 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9687
9688 dfloat64_type_node = make_node (REAL_TYPE);
9689 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9690 layout_type (dfloat64_type_node);
9691 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9692 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9693
9694 dfloat128_type_node = make_node (REAL_TYPE);
9695 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9696 layout_type (dfloat128_type_node);
9697 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9698 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9699
9700 complex_integer_type_node = build_complex_type (integer_type_node);
9701 complex_float_type_node = build_complex_type (float_type_node);
9702 complex_double_type_node = build_complex_type (double_type_node);
9703 complex_long_double_type_node = build_complex_type (long_double_type_node);
9704
9705 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9706 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9707 sat_ ## KIND ## _type_node = \
9708 make_sat_signed_ ## KIND ## _type (SIZE); \
9709 sat_unsigned_ ## KIND ## _type_node = \
9710 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9711 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9712 unsigned_ ## KIND ## _type_node = \
9713 make_unsigned_ ## KIND ## _type (SIZE);
9714
9715 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9716 sat_ ## WIDTH ## KIND ## _type_node = \
9717 make_sat_signed_ ## KIND ## _type (SIZE); \
9718 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9719 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9720 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9721 unsigned_ ## WIDTH ## KIND ## _type_node = \
9722 make_unsigned_ ## KIND ## _type (SIZE);
9723
9724 /* Make fixed-point type nodes based on four different widths. */
9725 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9726 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9727 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9728 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9729 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9730
9731 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9732 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9733 NAME ## _type_node = \
9734 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9735 u ## NAME ## _type_node = \
9736 make_or_reuse_unsigned_ ## KIND ## _type \
9737 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9738 sat_ ## NAME ## _type_node = \
9739 make_or_reuse_sat_signed_ ## KIND ## _type \
9740 (GET_MODE_BITSIZE (MODE ## mode)); \
9741 sat_u ## NAME ## _type_node = \
9742 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9743 (GET_MODE_BITSIZE (U ## MODE ## mode));
9744
9745 /* Fixed-point type and mode nodes. */
9746 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9747 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9748 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9749 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9750 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9751 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9752 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9753 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9754 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9755 MAKE_FIXED_MODE_NODE (accum, da, DA)
9756 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9757
9758 {
9759 tree t = targetm.build_builtin_va_list ();
9760
9761 /* Many back-ends define record types without setting TYPE_NAME.
9762 If we copied the record type here, we'd keep the original
9763 record type without a name. This breaks name mangling. So,
9764 don't copy record types and let c_common_nodes_and_builtins()
9765 declare the type to be __builtin_va_list. */
9766 if (TREE_CODE (t) != RECORD_TYPE)
9767 t = build_variant_type_copy (t);
9768
9769 va_list_type_node = t;
9770 }
9771 }
9772
9773 /* Modify DECL for given flags.
9774 TM_PURE attribute is set only on types, so the function will modify
9775 DECL's type when ECF_TM_PURE is used. */
9776
9777 void
9778 set_call_expr_flags (tree decl, int flags)
9779 {
9780 if (flags & ECF_NOTHROW)
9781 TREE_NOTHROW (decl) = 1;
9782 if (flags & ECF_CONST)
9783 TREE_READONLY (decl) = 1;
9784 if (flags & ECF_PURE)
9785 DECL_PURE_P (decl) = 1;
9786 if (flags & ECF_LOOPING_CONST_OR_PURE)
9787 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9788 if (flags & ECF_NOVOPS)
9789 DECL_IS_NOVOPS (decl) = 1;
9790 if (flags & ECF_NORETURN)
9791 TREE_THIS_VOLATILE (decl) = 1;
9792 if (flags & ECF_MALLOC)
9793 DECL_IS_MALLOC (decl) = 1;
9794 if (flags & ECF_RETURNS_TWICE)
9795 DECL_IS_RETURNS_TWICE (decl) = 1;
9796 if (flags & ECF_LEAF)
9797 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9798 NULL, DECL_ATTRIBUTES (decl));
9799 if ((flags & ECF_TM_PURE) && flag_tm)
9800 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9801 /* Looping const or pure is implied by noreturn.
9802 There is currently no way to declare looping const or looping pure alone. */
9803 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9804 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9805 }
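/* Illustrative sketch (editorial note, not part of GCC): a caller that
   wants a declaration treated as nothrow and leaf hands the matching
   ECF_* bits to set_call_expr_flags:

     tree fndecl = ...;    (some FUNCTION_DECL obtained elsewhere)
     set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_LEAF);

   Afterwards TREE_NOTHROW (fndecl) is set and a "leaf" attribute has been
   chained onto DECL_ATTRIBUTES (fndecl).  local_define_builtin below is
   the in-file user of this helper.  Illustration only.  */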
9806
9807
9808 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9809
9810 static void
9811 local_define_builtin (const char *name, tree type, enum built_in_function code,
9812 const char *library_name, int ecf_flags)
9813 {
9814 tree decl;
9815
9816 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9817 library_name, NULL_TREE);
9818 set_call_expr_flags (decl, ecf_flags);
9819
9820 set_builtin_decl (code, decl, true);
9821 }
9822
9823 /* Call this function after instantiating all builtins that the language
9824 front end cares about. This will build the rest of the builtins that
9825 are relied upon by the tree optimizers and the middle-end. */
9826
9827 void
9828 build_common_builtin_nodes (void)
9829 {
9830 tree tmp, ftype;
9831 int ecf_flags;
9832
9833 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9834 {
9835 ftype = build_function_type (void_type_node, void_list_node);
9836 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9837 "__builtin_unreachable",
9838 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9839 | ECF_CONST);
9840 }
9841
9842 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9843 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9844 {
9845 ftype = build_function_type_list (ptr_type_node,
9846 ptr_type_node, const_ptr_type_node,
9847 size_type_node, NULL_TREE);
9848
9849 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9850 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9851 "memcpy", ECF_NOTHROW | ECF_LEAF);
9852 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9853 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9854 "memmove", ECF_NOTHROW | ECF_LEAF);
9855 }
9856
9857 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9858 {
9859 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9860 const_ptr_type_node, size_type_node,
9861 NULL_TREE);
9862 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9863 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9864 }
9865
9866 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9867 {
9868 ftype = build_function_type_list (ptr_type_node,
9869 ptr_type_node, integer_type_node,
9870 size_type_node, NULL_TREE);
9871 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9872 "memset", ECF_NOTHROW | ECF_LEAF);
9873 }
9874
9875 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9876 {
9877 ftype = build_function_type_list (ptr_type_node,
9878 size_type_node, NULL_TREE);
9879 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9880 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9881 }
9882
9883 ftype = build_function_type_list (ptr_type_node, size_type_node,
9884 size_type_node, NULL_TREE);
9885 local_define_builtin ("__builtin_alloca_with_align", ftype,
9886 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9887 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9888
9889 /* If we're checking the stack, `alloca' can throw. */
9890 if (flag_stack_check)
9891 {
9892 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9893 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9894 }
9895
9896 ftype = build_function_type_list (void_type_node,
9897 ptr_type_node, ptr_type_node,
9898 ptr_type_node, NULL_TREE);
9899 local_define_builtin ("__builtin_init_trampoline", ftype,
9900 BUILT_IN_INIT_TRAMPOLINE,
9901 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9902 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9903 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9904 "__builtin_init_heap_trampoline",
9905 ECF_NOTHROW | ECF_LEAF);
9906
9907 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9908 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9909 BUILT_IN_ADJUST_TRAMPOLINE,
9910 "__builtin_adjust_trampoline",
9911 ECF_CONST | ECF_NOTHROW);
9912
9913 ftype = build_function_type_list (void_type_node,
9914 ptr_type_node, ptr_type_node, NULL_TREE);
9915 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9916 BUILT_IN_NONLOCAL_GOTO,
9917 "__builtin_nonlocal_goto",
9918 ECF_NORETURN | ECF_NOTHROW);
9919
9920 ftype = build_function_type_list (void_type_node,
9921 ptr_type_node, ptr_type_node, NULL_TREE);
9922 local_define_builtin ("__builtin_setjmp_setup", ftype,
9923 BUILT_IN_SETJMP_SETUP,
9924 "__builtin_setjmp_setup", ECF_NOTHROW);
9925
9926 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9927 local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
9928 BUILT_IN_SETJMP_DISPATCHER,
9929 "__builtin_setjmp_dispatcher",
9930 ECF_PURE | ECF_NOTHROW);
9931
9932 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9933 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9934 BUILT_IN_SETJMP_RECEIVER,
9935 "__builtin_setjmp_receiver", ECF_NOTHROW);
9936
9937 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9938 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9939 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9940
9941 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9942 local_define_builtin ("__builtin_stack_restore", ftype,
9943 BUILT_IN_STACK_RESTORE,
9944 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9945
9946 /* If there's a possibility that we might use the ARM EABI, build the
9947 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9948 if (targetm.arm_eabi_unwinder)
9949 {
9950 ftype = build_function_type_list (void_type_node, NULL_TREE);
9951 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9952 BUILT_IN_CXA_END_CLEANUP,
9953 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9954 }
9955
9956 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9957 local_define_builtin ("__builtin_unwind_resume", ftype,
9958 BUILT_IN_UNWIND_RESUME,
9959 ((targetm_common.except_unwind_info (&global_options)
9960 == UI_SJLJ)
9961 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9962 ECF_NORETURN);
9963
9964 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9965 {
9966 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9967 NULL_TREE);
9968 local_define_builtin ("__builtin_return_address", ftype,
9969 BUILT_IN_RETURN_ADDRESS,
9970 "__builtin_return_address",
9971 ECF_NOTHROW);
9972 }
9973
9974 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9975 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9976 {
9977 ftype = build_function_type_list (void_type_node, ptr_type_node,
9978 ptr_type_node, NULL_TREE);
9979 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9980 local_define_builtin ("__cyg_profile_func_enter", ftype,
9981 BUILT_IN_PROFILE_FUNC_ENTER,
9982 "__cyg_profile_func_enter", 0);
9983 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9984 local_define_builtin ("__cyg_profile_func_exit", ftype,
9985 BUILT_IN_PROFILE_FUNC_EXIT,
9986 "__cyg_profile_func_exit", 0);
9987 }
9988
9989 /* The exception object and filter values from the runtime. The argument
9990 must be zero before exception lowering, i.e. from the front end. After
9991 exception lowering, it will be the region number for the exception
9992 landing pad. These functions are PURE instead of CONST to prevent
9993 them from being hoisted past the exception edge that will initialize
9994 its value in the landing pad. */
9995 ftype = build_function_type_list (ptr_type_node,
9996 integer_type_node, NULL_TREE);
9997 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9998 /* Only use TM_PURE if we have TM language support. */
9999 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10000 ecf_flags |= ECF_TM_PURE;
10001 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10002 "__builtin_eh_pointer", ecf_flags);
10003
10004 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10005 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10006 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10007 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10008
10009 ftype = build_function_type_list (void_type_node,
10010 integer_type_node, integer_type_node,
10011 NULL_TREE);
10012 local_define_builtin ("__builtin_eh_copy_values", ftype,
10013 BUILT_IN_EH_COPY_VALUES,
10014 "__builtin_eh_copy_values", ECF_NOTHROW);
10015
10016 /* Complex multiplication and division. These are handled as builtins
10017 rather than optabs because emit_library_call_value doesn't support
10018 complex. Further, we can do slightly better with folding these
10019 beasties if the real and imaginary parts of the arguments are separate. */
10020 {
10021 int mode;
10022
10023 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10024 {
10025 char mode_name_buf[4], *q;
10026 const char *p;
10027 enum built_in_function mcode, dcode;
10028 tree type, inner_type;
10029 const char *prefix = "__";
10030
10031 if (targetm.libfunc_gnu_prefix)
10032 prefix = "__gnu_";
10033
10034 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10035 if (type == NULL)
10036 continue;
10037 inner_type = TREE_TYPE (type);
10038
10039 ftype = build_function_type_list (type, inner_type, inner_type,
10040 inner_type, inner_type, NULL_TREE);
10041
10042 mcode = ((enum built_in_function)
10043 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10044 dcode = ((enum built_in_function)
10045 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10046
10047 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10048 *q = TOLOWER (*p);
10049 *q = '\0';
10050
10051 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10052 NULL);
10053 local_define_builtin (built_in_names[mcode], ftype, mcode,
10054 built_in_names[mcode],
10055 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10056
10057 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10058 NULL);
10059 local_define_builtin (built_in_names[dcode], ftype, dcode,
10060 built_in_names[dcode],
10061 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10062 }
10063 }
10064 }
10065
10066 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10067 better way.
10068
10069 If we requested a pointer to a vector, build up the pointers that
10070 we stripped off while looking for the inner type. Similarly for
10071 return values from functions.
10072
10073 The argument TYPE is the top of the chain, and BOTTOM is the
10074 new type which we will point to. */
10075
10076 tree
10077 reconstruct_complex_type (tree type, tree bottom)
10078 {
10079 tree inner, outer;
10080
10081 if (TREE_CODE (type) == POINTER_TYPE)
10082 {
10083 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10084 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10085 TYPE_REF_CAN_ALIAS_ALL (type));
10086 }
10087 else if (TREE_CODE (type) == REFERENCE_TYPE)
10088 {
10089 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10090 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10091 TYPE_REF_CAN_ALIAS_ALL (type));
10092 }
10093 else if (TREE_CODE (type) == ARRAY_TYPE)
10094 {
10095 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10096 outer = build_array_type (inner, TYPE_DOMAIN (type));
10097 }
10098 else if (TREE_CODE (type) == FUNCTION_TYPE)
10099 {
10100 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10101 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10102 }
10103 else if (TREE_CODE (type) == METHOD_TYPE)
10104 {
10105 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10106 /* The build_method_type_directly() routine prepends 'this' to the argument
10107 list, so we must compensate by getting rid of it. */
10108 outer
10109 = build_method_type_directly
10110 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10111 inner,
10112 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10113 }
10114 else if (TREE_CODE (type) == OFFSET_TYPE)
10115 {
10116 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10117 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10118 }
10119 else
10120 return bottom;
10121
10122 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10123 TYPE_QUALS (type));
10124 }
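/* Illustrative sketch (editorial note, not part of GCC): if TYPE was a
   plain pointer to float and BOTTOM is a vector of floats, the recursion
   above rebuilds the stripped pointer layer around the new inner type:

     tree v4sf   = build_vector_type (float_type_node, 4);
     tree newptr = reconstruct_complex_type (float_ptr_type_node, v4sf);

   Here newptr points to the vector type, with the qualifiers and
   attributes of the original pointer type reapplied by
   build_type_attribute_qual_variant.  Illustration only.  */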
10125
10126 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10127 the inner type. */
10128 tree
10129 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10130 {
10131 int nunits;
10132
10133 switch (GET_MODE_CLASS (mode))
10134 {
10135 case MODE_VECTOR_INT:
10136 case MODE_VECTOR_FLOAT:
10137 case MODE_VECTOR_FRACT:
10138 case MODE_VECTOR_UFRACT:
10139 case MODE_VECTOR_ACCUM:
10140 case MODE_VECTOR_UACCUM:
10141 nunits = GET_MODE_NUNITS (mode);
10142 break;
10143
10144 case MODE_INT:
10145 /* Check that there are no leftover bits. */
10146 gcc_assert (GET_MODE_BITSIZE (mode)
10147 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10148
10149 nunits = GET_MODE_BITSIZE (mode)
10150 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10151 break;
10152
10153 default:
10154 gcc_unreachable ();
10155 }
10156
10157 return make_vector_type (innertype, nunits, mode);
10158 }
10159
10160 /* Similarly, but takes the inner type and number of units, which must be
10161 a power of two. */
10162
10163 tree
10164 build_vector_type (tree innertype, int nunits)
10165 {
10166 return make_vector_type (innertype, nunits, VOIDmode);
10167 }
10168
10169 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10170
10171 tree
10172 build_opaque_vector_type (tree innertype, int nunits)
10173 {
10174 tree t = make_vector_type (innertype, nunits, VOIDmode);
10175 tree cand;
10176 /* We always build the non-opaque variant before the opaque one,
10177 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10178 cand = TYPE_NEXT_VARIANT (t);
10179 if (cand
10180 && TYPE_VECTOR_OPAQUE (cand)
10181 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10182 return cand;
10183 /* Otherwise build a variant type and make sure to queue it after
10184 the non-opaque type. */
10185 cand = build_distinct_type_copy (t);
10186 TYPE_VECTOR_OPAQUE (cand) = true;
10187 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10188 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10189 TYPE_NEXT_VARIANT (t) = cand;
10190 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10191 return cand;
10192 }
10193
10194
10195 /* Given an initializer INIT, return TRUE if INIT is zero or some
10196 aggregate of zeros. Otherwise return FALSE. */
10197 bool
10198 initializer_zerop (const_tree init)
10199 {
10200 tree elt;
10201
10202 STRIP_NOPS (init);
10203
10204 switch (TREE_CODE (init))
10205 {
10206 case INTEGER_CST:
10207 return integer_zerop (init);
10208
10209 case REAL_CST:
10210 /* ??? Note that this is not correct for C4X float formats. There,
10211 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10212 negative exponent. */
10213 return real_zerop (init)
10214 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10215
10216 case FIXED_CST:
10217 return fixed_zerop (init);
10218
10219 case COMPLEX_CST:
10220 return integer_zerop (init)
10221 || (real_zerop (init)
10222 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10223 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10224
10225 case VECTOR_CST:
10226 {
10227 unsigned i;
10228 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10229 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10230 return false;
10231 return true;
10232 }
10233
10234 case CONSTRUCTOR:
10235 {
10236 unsigned HOST_WIDE_INT idx;
10237
10238 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10239 if (!initializer_zerop (elt))
10240 return false;
10241 return true;
10242 }
10243
10244 case STRING_CST:
10245 {
10246 int i;
10247
10248 /* We need to loop through all elements to handle cases like
10249 "\0" and "\0foobar". */
10250 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10251 if (TREE_STRING_POINTER (init)[i] != '\0')
10252 return false;
10253
10254 return true;
10255 }
10256
10257 default:
10258 return false;
10259 }
10260 }
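/* Illustrative sketch (editorial note, not part of GCC): examples of what
   the predicate above accepts and rejects:

     initializer_zerop (build_int_cst (integer_type_node, 0))    -> true
     initializer_zerop (build_int_cst (integer_type_node, 7))    -> false
     initializer_zerop (build_string (3, "\0\0\0"))              -> true

   A REAL_CST of -0.0 is rejected, since its bit pattern is not all zeros.
   build_string and build_int_cst are the constructors defined elsewhere
   in this file; the lines above are illustration, not compiled code.  */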
10261
10262 /* Check if vector VEC consists of all equal elements and
10263 that the number of elements corresponds to the type of VEC.
10264 The function returns the first element of the vector
10265 or NULL_TREE if the vector is not uniform. */
10266 tree
10267 uniform_vector_p (const_tree vec)
10268 {
10269 tree first, t;
10270 unsigned i;
10271
10272 if (vec == NULL_TREE)
10273 return NULL_TREE;
10274
10275 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10276
10277 if (TREE_CODE (vec) == VECTOR_CST)
10278 {
10279 first = VECTOR_CST_ELT (vec, 0);
10280 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10281 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10282 return NULL_TREE;
10283
10284 return first;
10285 }
10286
10287 else if (TREE_CODE (vec) == CONSTRUCTOR)
10288 {
10289 first = error_mark_node;
10290
10291 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10292 {
10293 if (i == 0)
10294 {
10295 first = t;
10296 continue;
10297 }
10298 if (!operand_equal_p (first, t, 0))
10299 return NULL_TREE;
10300 }
10301 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10302 return NULL_TREE;
10303
10304 return first;
10305 }
10306
10307 return NULL_TREE;
10308 }
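/* Illustrative sketch (editorial note, not part of GCC): a VECTOR_CST
   whose lanes all hold the same constant is reported as uniform.
   Assuming a four-lane type v4si built with
   build_vector_type (integer_type_node, 4):

     tree elt = build_int_cst (integer_type_node, 5);
     tree dup = build_vector_from_val (v4si, elt);
     uniform_vector_p (dup)    returns the replicated element

   while a CONSTRUCTOR whose elements differ, or one with too few elements
   for the vector type, yields NULL_TREE.  build_vector_from_val is
   defined elsewhere in this file; illustration only.  */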
10309
10310 /* Build an empty statement at location LOC. */
10311
10312 tree
10313 build_empty_stmt (location_t loc)
10314 {
10315 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10316 SET_EXPR_LOCATION (t, loc);
10317 return t;
10318 }
10319
10320
10321 /* Build an OpenMP clause with code CODE. LOC is the location of the
10322 clause. */
10323
10324 tree
10325 build_omp_clause (location_t loc, enum omp_clause_code code)
10326 {
10327 tree t;
10328 int size, length;
10329
10330 length = omp_clause_num_ops[code];
10331 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10332
10333 record_node_allocation_statistics (OMP_CLAUSE, size);
10334
10335 t = ggc_alloc_tree_node (size);
10336 memset (t, 0, size);
10337 TREE_SET_CODE (t, OMP_CLAUSE);
10338 OMP_CLAUSE_SET_CODE (t, code);
10339 OMP_CLAUSE_LOCATION (t) = loc;
10340
10341 return t;
10342 }
10343
10344 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10345 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10346 Except for the CODE and operand count field, other storage for the
10347 object is initialized to zeros. */
10348
10349 tree
10350 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10351 {
10352 tree t;
10353 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10354
10355 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10356 gcc_assert (len >= 1);
10357
10358 record_node_allocation_statistics (code, length);
10359
10360 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10361
10362 TREE_SET_CODE (t, code);
10363
10364 /* Can't use TREE_OPERAND to store the length because if checking is
10365 enabled, it will try to check the length before we store it. :-P */
10366 t->exp.operands[0] = build_int_cst (sizetype, len);
10367
10368 return t;
10369 }
10370
10371 /* Helper function for build_call_* functions; build a CALL_EXPR with
10372 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10373 the argument slots. */
10374
10375 static tree
10376 build_call_1 (tree return_type, tree fn, int nargs)
10377 {
10378 tree t;
10379
10380 t = build_vl_exp (CALL_EXPR, nargs + 3);
10381 TREE_TYPE (t) = return_type;
10382 CALL_EXPR_FN (t) = fn;
10383 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10384
10385 return t;
10386 }
10387
10388 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10389 FN and a null static chain slot. NARGS is the number of call arguments
10390 which are specified as "..." arguments. */
10391
10392 tree
10393 build_call_nary (tree return_type, tree fn, int nargs, ...)
10394 {
10395 tree ret;
10396 va_list args;
10397 va_start (args, nargs);
10398 ret = build_call_valist (return_type, fn, nargs, args);
10399 va_end (args);
10400 return ret;
10401 }
10402
10403 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10404 FN and a null static chain slot. NARGS is the number of call arguments
10405 which are specified as a va_list ARGS. */
10406
10407 tree
10408 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10409 {
10410 tree t;
10411 int i;
10412
10413 t = build_call_1 (return_type, fn, nargs);
10414 for (i = 0; i < nargs; i++)
10415 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10416 process_call_operands (t);
10417 return t;
10418 }
10419
10420 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10421 FN and a null static chain slot. NARGS is the number of call arguments
10422 which are specified as a tree array ARGS. */
10423
10424 tree
10425 build_call_array_loc (location_t loc, tree return_type, tree fn,
10426 int nargs, const tree *args)
10427 {
10428 tree t;
10429 int i;
10430
10431 t = build_call_1 (return_type, fn, nargs);
10432 for (i = 0; i < nargs; i++)
10433 CALL_EXPR_ARG (t, i) = args[i];
10434 process_call_operands (t);
10435 SET_EXPR_LOCATION (t, loc);
10436 return t;
10437 }
10438
10439 /* Like build_call_array, but takes a vec. */
10440
10441 tree
10442 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10443 {
10444 tree ret, t;
10445 unsigned int ix;
10446
10447 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10448 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10449 CALL_EXPR_ARG (ret, ix) = t;
10450 process_call_operands (ret);
10451 return ret;
10452 }
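/* Illustrative sketch (editorial note, not part of GCC): the build_call_*
   variants above all feed build_call_1, so a two-argument call to some
   FUNCTION_DECL fndecl (obtained elsewhere) can be spelled as

     tree fn   = build_fold_addr_expr (fndecl);
     tree call = build_call_nary (integer_type_node, fn, 2,
                                  integer_zero_node, integer_one_node);

   CALL_EXPR_FN (call) is fn, CALL_EXPR_ARG (call, 0) and (call, 1) are
   the two arguments, and the static chain slot is left NULL.
   build_fold_addr_expr lives in fold-const.c.  Illustration only.  */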
10453
10454 /* Return true if T (assumed to be a DECL) must be assigned a memory
10455 location. */
10456
10457 bool
10458 needs_to_live_in_memory (const_tree t)
10459 {
10460 return (TREE_ADDRESSABLE (t)
10461 || is_global_var (t)
10462 || (TREE_CODE (t) == RESULT_DECL
10463 && !DECL_BY_REFERENCE (t)
10464 && aggregate_value_p (t, current_function_decl)));
10465 }
10466
10467 /* Return the value of the constant X, sign-extended. */
10468
10469 HOST_WIDE_INT
10470 int_cst_value (const_tree x)
10471 {
10472 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10473 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10474
10475 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10476 gcc_assert (cst_fits_shwi_p (x));
10477
10478 if (bits < HOST_BITS_PER_WIDE_INT)
10479 {
10480 bool negative = ((val >> (bits - 1)) & 1) != 0;
10481 if (negative)
10482 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10483 else
10484 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10485 }
10486
10487 return val;
10488 }
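/* Illustrative sketch (editorial note, not part of GCC): the masking above
   is what turns the raw low word into a properly sign-extended host value.
   For an 8-bit signed type, the constant whose low bits are 0xff comes
   back as -1 rather than 255:

     tree int8 = build_nonstandard_integer_type (8, 0);
     tree c    = build_int_cst (int8, -1);
     HOST_WIDE_INT v = int_cst_value (c);    (v == -1)

   Illustration only; both constructors are defined elsewhere in this
   file.  */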
10489
10490 /* Return the value of the constant X, sign-extended. */
10491
10492 HOST_WIDEST_INT
10493 widest_int_cst_value (const_tree x)
10494 {
10495 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10496 unsigned HOST_WIDEST_INT val = TREE_INT_CST_LOW (x);
10497
10498 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10499 gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
10500 gcc_assert (TREE_INT_CST_NUNITS (x) <= 2);
10501
10502 if (TREE_INT_CST_NUNITS (x) == 1)
10503 val = HOST_WIDE_INT (val);
10504 else
10505 val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_ELT (x, 1))
10506 << HOST_BITS_PER_WIDE_INT);
10507 #else
10508 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10509 gcc_assert (TREE_INT_CST_NUNITS (x) == 1);
10510 #endif
10511
10512 if (bits < HOST_BITS_PER_WIDEST_INT)
10513 {
10514 bool negative = ((val >> (bits - 1)) & 1) != 0;
10515 if (negative)
10516 val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
10517 else
10518 val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
10519 }
10520
10521 return val;
10522 }
10523
10524 /* If TYPE is an integral or pointer type, return an integer type with
10525 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10526 if TYPE is already an integer type of signedness UNSIGNEDP. */
10527
10528 tree
10529 signed_or_unsigned_type_for (int unsignedp, tree type)
10530 {
10531 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10532 return type;
10533
10534 if (TREE_CODE (type) == VECTOR_TYPE)
10535 {
10536 tree inner = TREE_TYPE (type);
10537 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10538 if (!inner2)
10539 return NULL_TREE;
10540 if (inner == inner2)
10541 return type;
10542 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10543 }
10544
10545 if (!INTEGRAL_TYPE_P (type)
10546 && !POINTER_TYPE_P (type))
10547 return NULL_TREE;
10548
10549 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10550 }
10551
10552 /* If TYPE is an integral or pointer type, return an integer type with
10553 the same precision which is unsigned, or itself if TYPE is already an
10554 unsigned integer type. */
10555
10556 tree
10557 unsigned_type_for (tree type)
10558 {
10559 return signed_or_unsigned_type_for (1, type);
10560 }
10561
10562 /* If TYPE is an integral or pointer type, return an integer type with
10563 the same precision which is signed, or itself if TYPE is already a
10564 signed integer type. */
10565
10566 tree
10567 signed_type_for (tree type)
10568 {
10569 return signed_or_unsigned_type_for (0, type);
10570 }
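/* Illustrative sketch (editorial note, not part of GCC): the three
   wrappers above either hand back TYPE itself or build an integer type of
   the same precision with the requested signedness:

     tree u = unsigned_type_for (integer_type_node);
       (TYPE_UNSIGNED (u) is set, TYPE_PRECISION (u) == INT_TYPE_SIZE)
     tree s = signed_type_for (size_type_node);
       (a signed integer type as wide as size_t)

   For vector types the conversion is applied to the element type and a
   vector with the same number of subparts is rebuilt; anything that is
   neither integral, pointer, nor such a vector yields NULL_TREE.
   Illustration only.  */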
10571
10572 /* If TYPE is a vector type, return a signed integer vector type with the
10573 same width and number of subparts. Otherwise return boolean_type_node. */
10574
10575 tree
10576 truth_type_for (tree type)
10577 {
10578 if (TREE_CODE (type) == VECTOR_TYPE)
10579 {
10580 tree elem = lang_hooks.types.type_for_size
10581 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10582 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10583 }
10584 else
10585 return boolean_type_node;
10586 }
10587
10588 /* Returns the largest value obtainable by casting something in INNER type to
10589 OUTER type. */
10590
10591 tree
10592 upper_bound_in_type (tree outer, tree inner)
10593 {
10594 unsigned int det = 0;
10595 unsigned oprec = TYPE_PRECISION (outer);
10596 unsigned iprec = TYPE_PRECISION (inner);
10597 unsigned prec;
10598
10599 /* Compute a unique number for every combination. */
10600 det |= (oprec > iprec) ? 4 : 0;
10601 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10602 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10603
10604 /* Determine the exponent to use. */
10605 switch (det)
10606 {
10607 case 0:
10608 case 1:
10609 /* oprec <= iprec, outer: signed, inner: don't care. */
10610 prec = oprec - 1;
10611 break;
10612 case 2:
10613 case 3:
10614 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10615 prec = oprec;
10616 break;
10617 case 4:
10618 /* oprec > iprec, outer: signed, inner: signed. */
10619 prec = iprec - 1;
10620 break;
10621 case 5:
10622 /* oprec > iprec, outer: signed, inner: unsigned. */
10623 prec = iprec;
10624 break;
10625 case 6:
10626 /* oprec > iprec, outer: unsigned, inner: signed. */
10627 prec = oprec;
10628 break;
10629 case 7:
10630 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10631 prec = iprec;
10632 break;
10633 default:
10634 gcc_unreachable ();
10635 }
10636
10637 return wide_int_to_tree (outer,
10638 wi::mask (prec, false, TYPE_PRECISION (outer)));
10639 }
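/* Illustrative worked example (editorial note, not part of GCC) of the
   DET encoding above: casting unsigned char (iprec 8, unsigned) to a
   32-bit signed int (oprec 32, signed) gives det = 4|0|1 = 5, so
   prec = iprec = 8 and

     upper_bound_in_type (integer_type_node, unsigned_char_type_node)

   is the constant 255.  Casting int to int keeps det = 0, prec = 31,
   and yields 2^31 - 1 instead.  */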
10640
10641 /* Returns the smallest value obtainable by casting something in INNER type to
10642 OUTER type. */
10643
10644 tree
10645 lower_bound_in_type (tree outer, tree inner)
10646 {
10647 unsigned oprec = TYPE_PRECISION (outer);
10648 unsigned iprec = TYPE_PRECISION (inner);
10649
10650 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10651 and obtain 0. */
10652 if (TYPE_UNSIGNED (outer)
10653 /* If we are widening something of an unsigned type, OUTER type
10654 contains all values of INNER type. In particular, both INNER
10655 and OUTER types have zero in common. */
10656 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10657 return build_int_cst (outer, 0);
10658 else
10659 {
10660 /* If we are widening a signed type to another signed type, we
10661 want to obtain -2^(iprec-1). If we are keeping the
10662 precision or narrowing to a signed type, we want to obtain
10663 -2^(oprec-1). */
10664 unsigned prec = oprec > iprec ? iprec : oprec;
10665 return wide_int_to_tree (outer,
10666 wi::mask (prec - 1, true,
10667 TYPE_PRECISION (outer)));
10668 }
10669 }
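/* Illustrative worked examples (editorial note, not part of GCC)
   mirroring the cases above:

     lower_bound_in_type (unsigned_type_node, integer_type_node)     is 0
     lower_bound_in_type (integer_type_node, integer_type_node)      is -2^31
     lower_bound_in_type (integer_type_node, signed_char_type_node)  is -2^7

   i.e. an unsigned OUTER always bottoms out at zero, while widening a
   signed INNER into a signed OUTER keeps INNER's most negative value.  */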
10670
10671 /* Return nonzero if two operands that are suitable for PHI nodes are
10672 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10673 SSA_NAME or invariant. Note that this is strictly an optimization.
10674 That is, callers of this function can directly call operand_equal_p
10675 and get the same result, only slower. */
10676
10677 int
10678 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10679 {
10680 if (arg0 == arg1)
10681 return 1;
10682 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10683 return 0;
10684 return operand_equal_p (arg0, arg1, 0);
10685 }
10686
10687 /* Returns the number of zeros at the end of the binary representation of X. */
10688
10689 tree
10690 num_ending_zeros (const_tree x)
10691 {
10692 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10693 }
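/* Illustrative worked example (editorial note, not part of GCC): for the
   constant 40 (binary 101000),

     num_ending_zeros (build_int_cst (integer_type_node, 40))

   is the constant 3 of the same type, since wi::ctz counts the trailing
   zero bits.  */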
10694
10695
10696 #define WALK_SUBTREE(NODE) \
10697 do \
10698 { \
10699 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10700 if (result) \
10701 return result; \
10702 } \
10703 while (0)
10704
10705 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10706 to be walked whenever a type is seen in the tree. The rest of the operands
10707 and return value are as for walk_tree. */
10708
10709 static tree
10710 walk_type_fields (tree type, walk_tree_fn func, void *data,
10711 struct pointer_set_t *pset, walk_tree_lh lh)
10712 {
10713 tree result = NULL_TREE;
10714
10715 switch (TREE_CODE (type))
10716 {
10717 case POINTER_TYPE:
10718 case REFERENCE_TYPE:
10719 /* We have to worry about mutually recursive pointers. These can't
10720 be written in C. They can in Ada. It's pathological, but
10721 there's an ACATS test (c38102a) that checks it. Deal with this
10722 by checking if we're pointing to another pointer, that one
10723 points to another pointer, that one does too, and we have no htab.
10724 If so, get a hash table. We check three levels deep to avoid
10725 the cost of the hash table if we don't need one. */
10726 if (POINTER_TYPE_P (TREE_TYPE (type))
10727 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10728 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10729 && !pset)
10730 {
10731 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10732 func, data);
10733 if (result)
10734 return result;
10735
10736 break;
10737 }
10738
10739 /* ... fall through ... */
10740
10741 case COMPLEX_TYPE:
10742 WALK_SUBTREE (TREE_TYPE (type));
10743 break;
10744
10745 case METHOD_TYPE:
10746 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10747
10748 /* Fall through. */
10749
10750 case FUNCTION_TYPE:
10751 WALK_SUBTREE (TREE_TYPE (type));
10752 {
10753 tree arg;
10754
10755 /* We never want to walk into default arguments. */
10756 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10757 WALK_SUBTREE (TREE_VALUE (arg));
10758 }
10759 break;
10760
10761 case ARRAY_TYPE:
10762 /* Don't follow this node's type if it is a pointer, for fear that
10763 we'll have infinite recursion. If we have a PSET, then we
10764 need not fear. */
10765 if (pset
10766 || (!POINTER_TYPE_P (TREE_TYPE (type))
10767 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10768 WALK_SUBTREE (TREE_TYPE (type));
10769 WALK_SUBTREE (TYPE_DOMAIN (type));
10770 break;
10771
10772 case OFFSET_TYPE:
10773 WALK_SUBTREE (TREE_TYPE (type));
10774 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10775 break;
10776
10777 default:
10778 break;
10779 }
10780
10781 return NULL_TREE;
10782 }
10783
10784 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10785 called with the DATA and the address of each sub-tree. If FUNC returns a
10786 non-NULL value, the traversal is stopped, and the value returned by FUNC
10787 is returned. If PSET is non-NULL it is used to record the nodes visited,
10788 and to avoid visiting a node more than once. */
10789
10790 tree
10791 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10792 struct pointer_set_t *pset, walk_tree_lh lh)
10793 {
10794 enum tree_code code;
10795 int walk_subtrees;
10796 tree result;
10797
10798 #define WALK_SUBTREE_TAIL(NODE) \
10799 do \
10800 { \
10801 tp = & (NODE); \
10802 goto tail_recurse; \
10803 } \
10804 while (0)
10805
10806 tail_recurse:
10807 /* Skip empty subtrees. */
10808 if (!*tp)
10809 return NULL_TREE;
10810
10811 /* Don't walk the same tree twice, if the user has requested
10812 that we avoid doing so. */
10813 if (pset && pointer_set_insert (pset, *tp))
10814 return NULL_TREE;
10815
10816 /* Call the function. */
10817 walk_subtrees = 1;
10818 result = (*func) (tp, &walk_subtrees, data);
10819
10820 /* If we found something, return it. */
10821 if (result)
10822 return result;
10823
10824 code = TREE_CODE (*tp);
10825
10826 /* Even if we didn't, FUNC may have decided that there was nothing
10827 interesting below this point in the tree. */
10828 if (!walk_subtrees)
10829 {
10830 /* But we still need to check our siblings. */
10831 if (code == TREE_LIST)
10832 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10833 else if (code == OMP_CLAUSE)
10834 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10835 else
10836 return NULL_TREE;
10837 }
10838
10839 if (lh)
10840 {
10841 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10842 if (result || !walk_subtrees)
10843 return result;
10844 }
10845
10846 switch (code)
10847 {
10848 case ERROR_MARK:
10849 case IDENTIFIER_NODE:
10850 case INTEGER_CST:
10851 case REAL_CST:
10852 case FIXED_CST:
10853 case VECTOR_CST:
10854 case STRING_CST:
10855 case BLOCK:
10856 case PLACEHOLDER_EXPR:
10857 case SSA_NAME:
10858 case FIELD_DECL:
10859 case RESULT_DECL:
10860 /* None of these have subtrees other than those already walked
10861 above. */
10862 break;
10863
10864 case TREE_LIST:
10865 WALK_SUBTREE (TREE_VALUE (*tp));
10866 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10867 break;
10868
10869 case TREE_VEC:
10870 {
10871 int len = TREE_VEC_LENGTH (*tp);
10872
10873 if (len == 0)
10874 break;
10875
10876 /* Walk all elements but the first. */
10877 while (--len)
10878 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10879
10880 /* Now walk the first one as a tail call. */
10881 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10882 }
10883
10884 case COMPLEX_CST:
10885 WALK_SUBTREE (TREE_REALPART (*tp));
10886 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10887
10888 case CONSTRUCTOR:
10889 {
10890 unsigned HOST_WIDE_INT idx;
10891 constructor_elt *ce;
10892
10893 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10894 idx++)
10895 WALK_SUBTREE (ce->value);
10896 }
10897 break;
10898
10899 case SAVE_EXPR:
10900 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
10901
10902 case BIND_EXPR:
10903 {
10904 tree decl;
10905 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
10906 {
10907 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
10908 into declarations that are just mentioned, rather than
10909 declared; they don't really belong to this part of the tree.
10910 And, we can see cycles: the initializer for a declaration
10911 can refer to the declaration itself. */
10912 WALK_SUBTREE (DECL_INITIAL (decl));
10913 WALK_SUBTREE (DECL_SIZE (decl));
10914 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
10915 }
10916 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
10917 }
10918
10919 case STATEMENT_LIST:
10920 {
10921 tree_stmt_iterator i;
10922 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
10923 WALK_SUBTREE (*tsi_stmt_ptr (i));
10924 }
10925 break;
10926
10927 case OMP_CLAUSE:
10928 switch (OMP_CLAUSE_CODE (*tp))
10929 {
10930 case OMP_CLAUSE_PRIVATE:
10931 case OMP_CLAUSE_SHARED:
10932 case OMP_CLAUSE_FIRSTPRIVATE:
10933 case OMP_CLAUSE_COPYIN:
10934 case OMP_CLAUSE_COPYPRIVATE:
10935 case OMP_CLAUSE_FINAL:
10936 case OMP_CLAUSE_IF:
10937 case OMP_CLAUSE_NUM_THREADS:
10938 case OMP_CLAUSE_SCHEDULE:
10939 case OMP_CLAUSE_UNIFORM:
10940 case OMP_CLAUSE_DEPEND:
10941 case OMP_CLAUSE_NUM_TEAMS:
10942 case OMP_CLAUSE_THREAD_LIMIT:
10943 case OMP_CLAUSE_DEVICE:
10944 case OMP_CLAUSE_DIST_SCHEDULE:
10945 case OMP_CLAUSE_SAFELEN:
10946 case OMP_CLAUSE_SIMDLEN:
10947 case OMP_CLAUSE__LOOPTEMP_:
10948 case OMP_CLAUSE__SIMDUID_:
10949 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
10950 /* FALLTHRU */
10951
10952 case OMP_CLAUSE_NOWAIT:
10953 case OMP_CLAUSE_ORDERED:
10954 case OMP_CLAUSE_DEFAULT:
10955 case OMP_CLAUSE_UNTIED:
10956 case OMP_CLAUSE_MERGEABLE:
10957 case OMP_CLAUSE_PROC_BIND:
10958 case OMP_CLAUSE_INBRANCH:
10959 case OMP_CLAUSE_NOTINBRANCH:
10960 case OMP_CLAUSE_FOR:
10961 case OMP_CLAUSE_PARALLEL:
10962 case OMP_CLAUSE_SECTIONS:
10963 case OMP_CLAUSE_TASKGROUP:
10964 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10965
10966 case OMP_CLAUSE_LASTPRIVATE:
10967 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
10968 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
10969 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10970
10971 case OMP_CLAUSE_COLLAPSE:
10972 {
10973 int i;
10974 for (i = 0; i < 3; i++)
10975 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
10976 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10977 }
10978
10979 case OMP_CLAUSE_ALIGNED:
10980 case OMP_CLAUSE_LINEAR:
10981 case OMP_CLAUSE_FROM:
10982 case OMP_CLAUSE_TO:
10983 case OMP_CLAUSE_MAP:
10984 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
10985 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
10986 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10987
10988 case OMP_CLAUSE_REDUCTION:
10989 {
10990 int i;
10991 for (i = 0; i < 4; i++)
10992 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
10993 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10994 }
10995
10996 default:
10997 gcc_unreachable ();
10998 }
10999 break;
11000
11001 case TARGET_EXPR:
11002 {
11003 int i, len;
11004
11005 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11006 But, we only want to walk once. */
11007 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11008 for (i = 0; i < len; ++i)
11009 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11010 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11011 }
11012
11013 case DECL_EXPR:
11014 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11015 defining. We only want to walk into these fields of a type in this
11016 case and not in the general case of a mere reference to the type.
11017
11018 The criterion is as follows: if the field can be an expression, it
11019 must be walked only here. This should be in keeping with the fields
11020 that are directly gimplified in gimplify_type_sizes in order for the
11021 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11022 variable-sized types.
11023
11024 Note that DECLs get walked as part of processing the BIND_EXPR. */
11025 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11026 {
11027 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11028 if (TREE_CODE (*type_p) == ERROR_MARK)
11029 return NULL_TREE;
11030
11031 /* Call the function for the type. See if it returns anything or
11032 doesn't want us to continue. If we are to continue, walk both
11033 the normal fields and those for the declaration case. */
11034 result = (*func) (type_p, &walk_subtrees, data);
11035 if (result || !walk_subtrees)
11036 return result;
11037
11038 /* But do not walk a pointed-to type since it may itself need to
11039 be walked in the declaration case if it isn't anonymous. */
11040 if (!POINTER_TYPE_P (*type_p))
11041 {
11042 result = walk_type_fields (*type_p, func, data, pset, lh);
11043 if (result)
11044 return result;
11045 }
11046
11047 /* If this is a record type, also walk the fields. */
11048 if (RECORD_OR_UNION_TYPE_P (*type_p))
11049 {
11050 tree field;
11051
11052 for (field = TYPE_FIELDS (*type_p); field;
11053 field = DECL_CHAIN (field))
11054 {
11055 /* We'd like to look at the type of the field, but we can
11056 easily get infinite recursion. So assume it's pointed
11057 to elsewhere in the tree. Also, ignore things that
11058 aren't fields. */
11059 if (TREE_CODE (field) != FIELD_DECL)
11060 continue;
11061
11062 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11063 WALK_SUBTREE (DECL_SIZE (field));
11064 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11065 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11066 WALK_SUBTREE (DECL_QUALIFIER (field));
11067 }
11068 }
11069
11070 /* Same for scalar types. */
11071 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11072 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11073 || TREE_CODE (*type_p) == INTEGER_TYPE
11074 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11075 || TREE_CODE (*type_p) == REAL_TYPE)
11076 {
11077 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11078 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11079 }
11080
11081 WALK_SUBTREE (TYPE_SIZE (*type_p));
11082 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11083 }
11084 /* FALLTHRU */
11085
11086 default:
11087 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11088 {
11089 int i, len;
11090
11091 /* Walk over all the sub-trees of this operand. */
11092 len = TREE_OPERAND_LENGTH (*tp);
11093
11094 /* Go through the subtrees. We need to do this in forward order so
11095 that the scope of a FOR_EXPR is handled properly. */
11096 if (len)
11097 {
11098 for (i = 0; i < len - 1; ++i)
11099 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11100 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11101 }
11102 }
11103 /* If this is a type, walk the needed fields in the type. */
11104 else if (TYPE_P (*tp))
11105 return walk_type_fields (*tp, func, data, pset, lh);
11106 break;
11107 }
11108
11109 /* We didn't find what we were looking for. */
11110 return NULL_TREE;
11111
11112 #undef WALK_SUBTREE_TAIL
11113 }
11114 #undef WALK_SUBTREE
11115
11116 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11117
11118 tree
11119 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11120 walk_tree_lh lh)
11121 {
11122 tree result;
11123 struct pointer_set_t *pset;
11124
11125 pset = pointer_set_create ();
11126 result = walk_tree_1 (tp, func, data, pset, lh);
11127 pointer_set_destroy (pset);
11128 return result;
11129 }
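/* Illustrative sketch, not part of the original file and kept out of the
   build: a minimal walk_tree_fn callback used through the
   walk_tree_without_duplicates wrapper macro.  The names count_nodes_r
   and count_unique_nodes are hypothetical.  */
#if 0
static tree
count_nodes_r (tree *tp ATTRIBUTE_UNUSED, int *walk_subtrees ATTRIBUTE_UNUSED,
	       void *data)
{
  /* Bump the caller-supplied counter; each shared node is visited once.  */
  (*(unsigned *) data)++;
  /* Returning NULL_TREE means "keep walking".  */
  return NULL_TREE;
}

static unsigned
count_unique_nodes (tree expr)
{
  unsigned n = 0;
  walk_tree_without_duplicates (&expr, count_nodes_r, &n);
  return n;
}
#endif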
11130
11131
11132 tree
11133 tree_block (tree t)
11134 {
11135 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11136
11137 if (IS_EXPR_CODE_CLASS (c))
11138 return LOCATION_BLOCK (t->exp.locus);
11139 gcc_unreachable ();
11140 return NULL;
11141 }
11142
11143 void
11144 tree_set_block (tree t, tree b)
11145 {
11146 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11147
11148 if (IS_EXPR_CODE_CLASS (c))
11149 {
11150 if (b)
11151 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11152 else
11153 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11154 }
11155 else
11156 gcc_unreachable ();
11157 }
11158
11159 /* Create a nameless artificial label and put it in the current
11160 function context. The label has a location of LOC. Returns the
11161 newly created label. */
11162
11163 tree
11164 create_artificial_label (location_t loc)
11165 {
11166 tree lab = build_decl (loc,
11167 LABEL_DECL, NULL_TREE, void_type_node);
11168
11169 DECL_ARTIFICIAL (lab) = 1;
11170 DECL_IGNORED_P (lab) = 1;
11171 DECL_CONTEXT (lab) = current_function_decl;
11172 return lab;
11173 }
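/* Illustrative sketch, not part of the original file and kept out of the
   build: a typical way lowering code pairs the new label with a LABEL_EXPR
   so it can be emitted into a statement list.  The helper name is
   hypothetical.  */
#if 0
static tree
make_label_stmt (location_t loc)
{
  tree lab = create_artificial_label (loc);
  return build1 (LABEL_EXPR, void_type_node, lab);
}
#endif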
11174
11175 /* Given a tree, try to return a useful variable name that we can use
11176 to prefix a temporary that is being assigned the value of the tree.
11177 I.e., given <temp> = &A, return A. */
11178
11179 const char *
11180 get_name (tree t)
11181 {
11182 tree stripped_decl;
11183
11184 stripped_decl = t;
11185 STRIP_NOPS (stripped_decl);
11186 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11187 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11188 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11189 {
11190 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11191 if (!name)
11192 return NULL;
11193 return IDENTIFIER_POINTER (name);
11194 }
11195 else
11196 {
11197 switch (TREE_CODE (stripped_decl))
11198 {
11199 case ADDR_EXPR:
11200 return get_name (TREE_OPERAND (stripped_decl, 0));
11201 default:
11202 return NULL;
11203 }
11204 }
11205 }
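/* Illustrative sketch, not part of the original file and kept out of the
   build: get_name looks through an ADDR_EXPR, so for "<temp> = &foo" it
   yields "foo".  The variable name "foo" is only for the example.  */
#if 0
static const char *
name_of_address_taken_var (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("foo"), integer_type_node);
  tree addr = build1 (ADDR_EXPR, build_pointer_type (integer_type_node), var);
  return get_name (addr);	/* "foo" */
}
#endif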
11206
11207 /* Return true if FNTYPE has a variable argument list. */
11208
11209 bool
11210 stdarg_p (const_tree fntype)
11211 {
11212 function_args_iterator args_iter;
11213 tree n = NULL_TREE, t;
11214
11215 if (!fntype)
11216 return false;
11217
11218 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11219 {
11220 n = t;
11221 }
11222
11223 return n != NULL_TREE && n != void_type_node;
11224 }
11225
11226 /* Return true if FNTYPE has a prototype. */
11227
11228 bool
11229 prototype_p (tree fntype)
11230 {
11231 tree t;
11232
11233 gcc_assert (fntype != NULL_TREE);
11234
11235 t = TYPE_ARG_TYPES (fntype);
11236 return (t != NULL_TREE);
11237 }
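/* Illustrative sketch, not part of the original file and kept out of the
   build: contrasting stdarg_p and prototype_p on two synthetic function
   types.  */
#if 0
static void
stdarg_vs_prototype_example (void)
{
  /* int f (double): prototyped, not variadic.  */
  tree fixed = build_function_type_list (integer_type_node,
					 double_type_node, NULL_TREE);
  /* int g (double, ...): prototyped and variadic.  */
  tree vararg = build_varargs_function_type_list (integer_type_node,
						  double_type_node, NULL_TREE);
  gcc_assert (!stdarg_p (fixed) && stdarg_p (vararg));
  gcc_assert (prototype_p (fixed) && prototype_p (vararg));
}
#endif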
11238
11239 /* If BLOCK is inlined from an __attribute__((__artificial__))
11240 routine, return a pointer to the location from where it has been
11241 called. */
11242 location_t *
11243 block_nonartificial_location (tree block)
11244 {
11245 location_t *ret = NULL;
11246
11247 while (block && TREE_CODE (block) == BLOCK
11248 && BLOCK_ABSTRACT_ORIGIN (block))
11249 {
11250 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11251
11252 while (TREE_CODE (ao) == BLOCK
11253 && BLOCK_ABSTRACT_ORIGIN (ao)
11254 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11255 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11256
11257 if (TREE_CODE (ao) == FUNCTION_DECL)
11258 {
11259 /* If AO is an artificial inline, point RET to the
11260 call site locus at which it has been inlined and continue
11261 the loop, in case AO's caller is also an artificial
11262 inline. */
11263 if (DECL_DECLARED_INLINE_P (ao)
11264 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11265 ret = &BLOCK_SOURCE_LOCATION (block);
11266 else
11267 break;
11268 }
11269 else if (TREE_CODE (ao) != BLOCK)
11270 break;
11271
11272 block = BLOCK_SUPERCONTEXT (block);
11273 }
11274 return ret;
11275 }
11276
11277
11278 /* If EXP is inlined from an __attribute__((__artificial__))
11279 function, return the location of the original call expression. */
11280
11281 location_t
11282 tree_nonartificial_location (tree exp)
11283 {
11284 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11285
11286 if (loc)
11287 return *loc;
11288 else
11289 return EXPR_LOCATION (exp);
11290 }
11291
11292
11293 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11294 nodes. */
11295
11296 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11297
11298 static hashval_t
11299 cl_option_hash_hash (const void *x)
11300 {
11301 const_tree const t = (const_tree) x;
11302 const char *p;
11303 size_t i;
11304 size_t len = 0;
11305 hashval_t hash = 0;
11306
11307 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11308 {
11309 p = (const char *)TREE_OPTIMIZATION (t);
11310 len = sizeof (struct cl_optimization);
11311 }
11312
11313 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11314 {
11315 p = (const char *)TREE_TARGET_OPTION (t);
11316 len = sizeof (struct cl_target_option);
11317 }
11318
11319 else
11320 gcc_unreachable ();
11321
11322 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11323 something else. */
11324 for (i = 0; i < len; i++)
11325 if (p[i])
11326 hash = (hash << 4) ^ ((i << 2) | p[i]);
11327
11328 return hash;
11329 }
11330
11331 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11332 TARGET_OPTION tree node) is the same as that given by *Y, which is
11333 also an OPTIMIZATION or TARGET_OPTION tree node. */
11334
11335 static int
11336 cl_option_hash_eq (const void *x, const void *y)
11337 {
11338 const_tree const xt = (const_tree) x;
11339 const_tree const yt = (const_tree) y;
11340 const char *xp;
11341 const char *yp;
11342 size_t len;
11343
11344 if (TREE_CODE (xt) != TREE_CODE (yt))
11345 return 0;
11346
11347 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11348 {
11349 xp = (const char *)TREE_OPTIMIZATION (xt);
11350 yp = (const char *)TREE_OPTIMIZATION (yt);
11351 len = sizeof (struct cl_optimization);
11352 }
11353
11354 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11355 {
11356 xp = (const char *)TREE_TARGET_OPTION (xt);
11357 yp = (const char *)TREE_TARGET_OPTION (yt);
11358 len = sizeof (struct cl_target_option);
11359 }
11360
11361 else
11362 gcc_unreachable ();
11363
11364 return (memcmp (xp, yp, len) == 0);
11365 }
11366
11367 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11368
11369 tree
11370 build_optimization_node (struct gcc_options *opts)
11371 {
11372 tree t;
11373 void **slot;
11374
11375 /* Use the cache of optimization nodes. */
11376
11377 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11378 opts);
11379
11380 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11381 t = (tree) *slot;
11382 if (!t)
11383 {
11384 /* Insert this one into the hash table. */
11385 t = cl_optimization_node;
11386 *slot = t;
11387
11388 /* Make a new node for next time round. */
11389 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11390 }
11391
11392 return t;
11393 }
11394
11395 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11396
11397 tree
11398 build_target_option_node (struct gcc_options *opts)
11399 {
11400 tree t;
11401 void **slot;
11402
11403 /* Use the cache of target option nodes. */
11404
11405 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11406 opts);
11407
11408 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11409 t = (tree) *slot;
11410 if (!t)
11411 {
11412 /* Insert this one into the hash table. */
11413 t = cl_target_option_node;
11414 *slot = t;
11415
11416 /* Make a new node for next time round. */
11417 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11418 }
11419
11420 return t;
11421 }
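/* Illustrative sketch, not part of the original file and kept out of the
   build: how a front end or attribute handler might record the current
   global options on a function declaration.  FNDECL stands for some
   FUNCTION_DECL in scope.  */
#if 0
static void
record_current_options (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
}
#endif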
11422
11423 /* Determine the "ultimate origin" of a block. The block may be an inlined
11424 instance of an inlined instance of a block which is local to an inline
11425 function, so we have to trace all of the way back through the origin chain
11426 to find out what sort of node actually served as the original seed for the
11427 given block. */
11428
11429 tree
11430 block_ultimate_origin (const_tree block)
11431 {
11432 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11433
11434 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11435 nodes in the function to point to themselves; ignore that if
11436 we're trying to output the abstract instance of this function. */
11437 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11438 return NULL_TREE;
11439
11440 if (immediate_origin == NULL_TREE)
11441 return NULL_TREE;
11442 else
11443 {
11444 tree ret_val;
11445 tree lookahead = immediate_origin;
11446
11447 do
11448 {
11449 ret_val = lookahead;
11450 lookahead = (TREE_CODE (ret_val) == BLOCK
11451 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11452 }
11453 while (lookahead != NULL && lookahead != ret_val);
11454
11455 /* The block's abstract origin chain may not be the *ultimate* origin of
11456 the block. It could lead to a DECL that has an abstract origin set.
11457 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11458 will give us if it has one). Note that DECL's abstract origins are
11459 supposed to be the most distant ancestor (or so decl_ultimate_origin
11460 claims), so we don't need to loop following the DECL origins. */
11461 if (DECL_P (ret_val))
11462 return DECL_ORIGIN (ret_val);
11463
11464 return ret_val;
11465 }
11466 }
11467
11468 /* Return true iff conversion in EXP generates no instruction. Mark
11469 it inline so that we fully inline into the stripping functions even
11470 though we have two uses of this function. */
11471
11472 static inline bool
11473 tree_nop_conversion (const_tree exp)
11474 {
11475 tree outer_type, inner_type;
11476
11477 if (!CONVERT_EXPR_P (exp)
11478 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11479 return false;
11480 if (TREE_OPERAND (exp, 0) == error_mark_node)
11481 return false;
11482
11483 outer_type = TREE_TYPE (exp);
11484 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11485
11486 if (!inner_type)
11487 return false;
11488
11489 /* Use precision rather than machine mode when we can, which gives
11490 the correct answer even for submode (bit-field) types. */
11491 if ((INTEGRAL_TYPE_P (outer_type)
11492 || POINTER_TYPE_P (outer_type)
11493 || TREE_CODE (outer_type) == OFFSET_TYPE)
11494 && (INTEGRAL_TYPE_P (inner_type)
11495 || POINTER_TYPE_P (inner_type)
11496 || TREE_CODE (inner_type) == OFFSET_TYPE))
11497 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11498
11499 /* Otherwise fall back on comparing machine modes (e.g. for
11500 aggregate types, floats). */
11501 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11502 }
11503
11504 /* Return true iff conversion in EXP generates no instruction. Don't
11505 consider conversions changing the signedness. */
11506
11507 static bool
11508 tree_sign_nop_conversion (const_tree exp)
11509 {
11510 tree outer_type, inner_type;
11511
11512 if (!tree_nop_conversion (exp))
11513 return false;
11514
11515 outer_type = TREE_TYPE (exp);
11516 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11517
11518 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11519 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11520 }
11521
11522 /* Strip conversions from EXP according to tree_nop_conversion and
11523 return the resulting expression. */
11524
11525 tree
11526 tree_strip_nop_conversions (tree exp)
11527 {
11528 while (tree_nop_conversion (exp))
11529 exp = TREE_OPERAND (exp, 0);
11530 return exp;
11531 }
11532
11533 /* Strip conversions from EXP according to tree_sign_nop_conversion
11534 and return the resulting expression. */
11535
11536 tree
11537 tree_strip_sign_nop_conversions (tree exp)
11538 {
11539 while (tree_sign_nop_conversion (exp))
11540 exp = TREE_OPERAND (exp, 0);
11541 return exp;
11542 }
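/* Illustrative sketch, not part of the original file and kept out of the
   build: an int -> unsigned int conversion keeps the same precision, so
   tree_strip_nop_conversions removes it, while
   tree_strip_sign_nop_conversions keeps it because the signedness
   changes.  */
#if 0
static void
nop_stripping_example (void)
{
  tree i = build_int_cst (integer_type_node, 42);
  tree u = build1 (NOP_EXPR, unsigned_type_node, i);

  gcc_assert (tree_strip_nop_conversions (u) == i);
  gcc_assert (tree_strip_sign_nop_conversions (u) == u);
}
#endif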
11543
11544 /* Avoid any floating point extensions from EXP. */
11545 tree
11546 strip_float_extensions (tree exp)
11547 {
11548 tree sub, expt, subt;
11549
11550 /* For a floating point constant, look up the narrowest type that can
11551 hold it properly and handle it like (type)(narrowest_type)constant.
11552 This way we can optimize, for instance, a=a*2.0 where "a" is float
11553 but 2.0 is a double constant. */
11554 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11555 {
11556 REAL_VALUE_TYPE orig;
11557 tree type = NULL;
11558
11559 orig = TREE_REAL_CST (exp);
11560 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11561 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11562 type = float_type_node;
11563 else if (TYPE_PRECISION (TREE_TYPE (exp))
11564 > TYPE_PRECISION (double_type_node)
11565 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11566 type = double_type_node;
11567 if (type)
11568 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11569 }
11570
11571 if (!CONVERT_EXPR_P (exp))
11572 return exp;
11573
11574 sub = TREE_OPERAND (exp, 0);
11575 subt = TREE_TYPE (sub);
11576 expt = TREE_TYPE (exp);
11577
11578 if (!FLOAT_TYPE_P (subt))
11579 return exp;
11580
11581 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11582 return exp;
11583
11584 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11585 return exp;
11586
11587 return strip_float_extensions (sub);
11588 }
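/* Illustrative sketch, not part of the original file and kept out of the
   build: a float variable widened to double is stripped back to the
   variable itself, which is what makes "a = a * 2.0" cheap when "a" is
   float.  The variable name "a" is only for the example.  */
#if 0
static void
strip_float_extensions_example (void)
{
  tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("a"), float_type_node);
  tree widened = build1 (NOP_EXPR, double_type_node, a);

  /* The float -> double extension carries no extra information.  */
  gcc_assert (strip_float_extensions (widened) == a);
}
#endif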
11589
11590 /* Strip out all handled components that produce invariant
11591 offsets. */
11592
11593 const_tree
11594 strip_invariant_refs (const_tree op)
11595 {
11596 while (handled_component_p (op))
11597 {
11598 switch (TREE_CODE (op))
11599 {
11600 case ARRAY_REF:
11601 case ARRAY_RANGE_REF:
11602 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11603 || TREE_OPERAND (op, 2) != NULL_TREE
11604 || TREE_OPERAND (op, 3) != NULL_TREE)
11605 return NULL;
11606 break;
11607
11608 case COMPONENT_REF:
11609 if (TREE_OPERAND (op, 2) != NULL_TREE)
11610 return NULL;
11611 break;
11612
11613 default:;
11614 }
11615 op = TREE_OPERAND (op, 0);
11616 }
11617
11618 return op;
11619 }
11620
11621 static GTY(()) tree gcc_eh_personality_decl;
11622
11623 /* Return the GCC personality function decl. */
11624
11625 tree
11626 lhd_gcc_personality (void)
11627 {
11628 if (!gcc_eh_personality_decl)
11629 gcc_eh_personality_decl = build_personality_function ("gcc");
11630 return gcc_eh_personality_decl;
11631 }
11632
11633 /* For languages with One Definition Rule, work out if
11634 trees are actually the same even if the tree representation
11635 differs. This handles only decls appearing in TYPE_NAME
11636 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11637 RECORD_TYPE and IDENTIFIER_NODE. */
11638
11639 static bool
11640 same_for_odr (tree t1, tree t2)
11641 {
11642 if (t1 == t2)
11643 return true;
11644 if (!t1 || !t2)
11645 return false;
11646 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11647 if (TREE_CODE (t1) == IDENTIFIER_NODE
11648 && TREE_CODE (t2) == TYPE_DECL
11649 && DECL_FILE_SCOPE_P (t1))
11650 {
11651 t2 = DECL_NAME (t2);
11652 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11653 }
11654 if (TREE_CODE (t2) == IDENTIFIER_NODE
11655 && TREE_CODE (t1) == TYPE_DECL
11656 && DECL_FILE_SCOPE_P (t2))
11657 {
11658 t1 = DECL_NAME (t1);
11659 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11660 }
11661 if (TREE_CODE (t1) != TREE_CODE (t2))
11662 return false;
11663 if (TYPE_P (t1))
11664 return types_same_for_odr (t1, t2);
11665 if (DECL_P (t1))
11666 return decls_same_for_odr (t1, t2);
11667 return false;
11668 }
11669
11670 /* For languages with One Definition Rule, work out if
11671 decls are actually the same even if the tree representation
11672 differs. This handles only decls appearing in TYPE_NAME
11673 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11674 RECORD_TYPE and IDENTIFIER_NODE. */
11675
11676 static bool
11677 decls_same_for_odr (tree decl1, tree decl2)
11678 {
11679 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11680 && DECL_ORIGINAL_TYPE (decl1))
11681 decl1 = DECL_ORIGINAL_TYPE (decl1);
11682 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11683 && DECL_ORIGINAL_TYPE (decl2))
11684 decl2 = DECL_ORIGINAL_TYPE (decl2);
11685 if (decl1 == decl2)
11686 return true;
11687 if (!decl1 || !decl2)
11688 return false;
11689 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11690 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11691 return false;
11692 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11693 return true;
11694 if (TREE_CODE (decl1) != NAMESPACE_DECL
11695 && TREE_CODE (decl1) != TYPE_DECL)
11696 return false;
11697 if (!DECL_NAME (decl1))
11698 return false;
11699 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11700 gcc_checking_assert (!DECL_NAME (decl2)
11701 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11702 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11703 return false;
11704 return same_for_odr (DECL_CONTEXT (decl1),
11705 DECL_CONTEXT (decl2));
11706 }
11707
11708 /* For languages with One Definition Rule, work out if
11709 types are the same even if the tree representation differs.
11710 This is non-trivial for LTO, where minor differences in
11711 the type representation may have prevented type merging
11712 from merging two copies of an otherwise equivalent type. */
11713
11714 bool
11715 types_same_for_odr (tree type1, tree type2)
11716 {
11717 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11718 type1 = TYPE_MAIN_VARIANT (type1);
11719 type2 = TYPE_MAIN_VARIANT (type2);
11720 if (type1 == type2)
11721 return true;
11722
11723 #ifndef ENABLE_CHECKING
11724 if (!in_lto_p)
11725 return false;
11726 #endif
11727
11728 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11729 on the corresponding TYPE_STUB_DECL. */
11730 if (type_in_anonymous_namespace_p (type1)
11731 || type_in_anonymous_namespace_p (type2))
11732 return false;
11733 /* When assembler name of virtual table is available, it is
11734 easy to compare types for equivalence. */
11735 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11736 && BINFO_VTABLE (TYPE_BINFO (type1))
11737 && BINFO_VTABLE (TYPE_BINFO (type2)))
11738 {
11739 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11740 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11741
11742 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11743 {
11744 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11745 || !operand_equal_p (TREE_OPERAND (v1, 1),
11746 TREE_OPERAND (v2, 1), 0))
11747 return false;
11748 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11749 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11750 }
11751 v1 = DECL_ASSEMBLER_NAME (v1);
11752 v2 = DECL_ASSEMBLER_NAME (v2);
11753 return (v1 == v2);
11754 }
11755
11756 /* FIXME: the code comparing type names considers all instantiations of
11757 the same template to have the same name. This is because we have no
11758 access to template parameters. For types with no virtual method tables
11759 we can thus return false positives. At the moment we do not need
11760 to compare types in other scenarios than devirtualization. */
11761
11762 /* If the types are not structurally the same, do not bother to continue.
11763 A match in the remainder of the code would mean an ODR violation. */
11764 if (!types_compatible_p (type1, type2))
11765 return false;
11766 if (!TYPE_NAME (type1))
11767 return false;
11768 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
11769 return false;
11770 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
11771 return false;
11772 /* When not in LTO the MAIN_VARIANT check should be the same. */
11773 gcc_assert (in_lto_p);
11774
11775 return true;
11776 }
11777
11778 /* TARGET is a call target of a GIMPLE call statement
11779 (obtained by gimple_call_fn). Return true if it is
11780 an OBJ_TYPE_REF representing a virtual call to a C++ method.
11781 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11782 through a cast, where the middle-end devirtualization machinery
11783 can't apply.) */
11784
11785 bool
11786 virtual_method_call_p (tree target)
11787 {
11788 if (TREE_CODE (target) != OBJ_TYPE_REF)
11789 return false;
11790 target = TREE_TYPE (target);
11791 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11792 target = TREE_TYPE (target);
11793 if (TREE_CODE (target) == FUNCTION_TYPE)
11794 return false;
11795 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11796 return true;
11797 }
11798
11799 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11800
11801 tree
11802 obj_type_ref_class (tree ref)
11803 {
11804 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11805 ref = TREE_TYPE (ref);
11806 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11807 ref = TREE_TYPE (ref);
11808 /* We look for the type that THIS points to. ObjC also builds
11809 OBJ_TYPE_REF with non-method calls; their first parameter
11810 ID, however, also corresponds to the class type. */
11811 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11812 || TREE_CODE (ref) == FUNCTION_TYPE);
11813 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11814 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11815 return TREE_TYPE (ref);
11816 }
11817
11818 /* Return true if T is in an anonymous namespace. */
11819
11820 bool
11821 type_in_anonymous_namespace_p (tree t)
11822 {
11823 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11824 }
11825
11826 /* Try to find a base info of BINFO that would have its field decl at offset
11827 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11828 found, return it; otherwise return NULL_TREE. */
11829
11830 tree
11831 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11832 {
11833 tree type = BINFO_TYPE (binfo);
11834
11835 while (true)
11836 {
11837 HOST_WIDE_INT pos, size;
11838 tree fld;
11839 int i;
11840
11841 if (types_same_for_odr (type, expected_type))
11842 return binfo;
11843 if (offset < 0)
11844 return NULL_TREE;
11845
11846 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11847 {
11848 if (TREE_CODE (fld) != FIELD_DECL)
11849 continue;
11850
11851 pos = int_bit_position (fld);
11852 size = tree_to_uhwi (DECL_SIZE (fld));
11853 if (pos <= offset && (pos + size) > offset)
11854 break;
11855 }
11856 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11857 return NULL_TREE;
11858
11859 if (!DECL_ARTIFICIAL (fld))
11860 {
11861 binfo = TYPE_BINFO (TREE_TYPE (fld));
11862 if (!binfo)
11863 return NULL_TREE;
11864 }
11865 /* Offset 0 indicates the primary base, whose vtable contents are
11866 represented in the binfo for the derived class. */
11867 else if (offset != 0)
11868 {
11869 tree base_binfo, found_binfo = NULL_TREE;
11870 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11871 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11872 {
11873 found_binfo = base_binfo;
11874 break;
11875 }
11876 if (!found_binfo)
11877 return NULL_TREE;
11878 binfo = found_binfo;
11879 }
11880
11881 type = TREE_TYPE (fld);
11882 offset -= pos;
11883 }
11884 }
11885
11886 /* Returns true if X is a typedef decl. */
11887
11888 bool
11889 is_typedef_decl (tree x)
11890 {
11891 return (x && TREE_CODE (x) == TYPE_DECL
11892 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11893 }
11894
11895 /* Returns true iff TYPE is a type variant created for a typedef. */
11896
11897 bool
11898 typedef_variant_p (tree type)
11899 {
11900 return is_typedef_decl (TYPE_NAME (type));
11901 }
11902
11903 /* Warn about a use of an identifier which was marked deprecated. */
11904 void
11905 warn_deprecated_use (tree node, tree attr)
11906 {
11907 const char *msg;
11908
11909 if (node == 0 || !warn_deprecated_decl)
11910 return;
11911
11912 if (!attr)
11913 {
11914 if (DECL_P (node))
11915 attr = DECL_ATTRIBUTES (node);
11916 else if (TYPE_P (node))
11917 {
11918 tree decl = TYPE_STUB_DECL (node);
11919 if (decl)
11920 attr = lookup_attribute ("deprecated",
11921 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11922 }
11923 }
11924
11925 if (attr)
11926 attr = lookup_attribute ("deprecated", attr);
11927
11928 if (attr)
11929 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11930 else
11931 msg = NULL;
11932
11933 if (DECL_P (node))
11934 {
11935 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11936 if (msg)
11937 warning (OPT_Wdeprecated_declarations,
11938 "%qD is deprecated (declared at %r%s:%d%R): %s",
11939 node, "locus", xloc.file, xloc.line, msg);
11940 else
11941 warning (OPT_Wdeprecated_declarations,
11942 "%qD is deprecated (declared at %r%s:%d%R)",
11943 node, "locus", xloc.file, xloc.line);
11944 }
11945 else if (TYPE_P (node))
11946 {
11947 tree what = NULL_TREE;
11948 tree decl = TYPE_STUB_DECL (node);
11949
11950 if (TYPE_NAME (node))
11951 {
11952 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11953 what = TYPE_NAME (node);
11954 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11955 && DECL_NAME (TYPE_NAME (node)))
11956 what = DECL_NAME (TYPE_NAME (node));
11957 }
11958
11959 if (decl)
11960 {
11961 expanded_location xloc
11962 = expand_location (DECL_SOURCE_LOCATION (decl));
11963 if (what)
11964 {
11965 if (msg)
11966 warning (OPT_Wdeprecated_declarations,
11967 "%qE is deprecated (declared at %r%s:%d%R): %s",
11968 what, "locus", xloc.file, xloc.line, msg);
11969 else
11970 warning (OPT_Wdeprecated_declarations,
11971 "%qE is deprecated (declared at %r%s:%d%R)",
11972 what, "locus", xloc.file, xloc.line);
11973 }
11974 else
11975 {
11976 if (msg)
11977 warning (OPT_Wdeprecated_declarations,
11978 "type is deprecated (declared at %r%s:%d%R): %s",
11979 "locus", xloc.file, xloc.line, msg);
11980 else
11981 warning (OPT_Wdeprecated_declarations,
11982 "type is deprecated (declared at %r%s:%d%R)",
11983 "locus", xloc.file, xloc.line);
11984 }
11985 }
11986 else
11987 {
11988 if (what)
11989 {
11990 if (msg)
11991 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
11992 what, msg);
11993 else
11994 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
11995 }
11996 else
11997 {
11998 if (msg)
11999 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12000 msg);
12001 else
12002 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12003 }
12004 }
12005 }
12006 }
12007
12008 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12009 somewhere in it. */
12010
12011 bool
12012 contains_bitfld_component_ref_p (const_tree ref)
12013 {
12014 while (handled_component_p (ref))
12015 {
12016 if (TREE_CODE (ref) == COMPONENT_REF
12017 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12018 return true;
12019 ref = TREE_OPERAND (ref, 0);
12020 }
12021
12022 return false;
12023 }
12024
12025 /* Try to determine whether a TRY_CATCH expression can fall through.
12026 This is a subroutine of block_may_fallthru. */
12027
12028 static bool
12029 try_catch_may_fallthru (const_tree stmt)
12030 {
12031 tree_stmt_iterator i;
12032
12033 /* If the TRY block can fall through, the whole TRY_CATCH can
12034 fall through. */
12035 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12036 return true;
12037
12038 i = tsi_start (TREE_OPERAND (stmt, 1));
12039 switch (TREE_CODE (tsi_stmt (i)))
12040 {
12041 case CATCH_EXPR:
12042 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12043 catch expression and a body. The whole TRY_CATCH may fall
12044 through iff any of the catch bodies falls through. */
12045 for (; !tsi_end_p (i); tsi_next (&i))
12046 {
12047 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12048 return true;
12049 }
12050 return false;
12051
12052 case EH_FILTER_EXPR:
12053 /* The exception filter expression only matters if there is an
12054 exception. If the exception does not match EH_FILTER_TYPES,
12055 we will execute EH_FILTER_FAILURE, and we will fall through
12056 if that falls through. If the exception does match
12057 EH_FILTER_TYPES, the stack unwinder will continue up the
12058 stack, so we will not fall through. We don't know whether we
12059 will throw an exception which matches EH_FILTER_TYPES or not,
12060 so we just ignore EH_FILTER_TYPES and assume that we might
12061 throw an exception which doesn't match. */
12062 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12063
12064 default:
12065 /* This case represents statements to be executed when an
12066 exception occurs. Those statements are implicitly followed
12067 by a RESX statement to resume execution after the exception.
12068 So in this case the TRY_CATCH never falls through. */
12069 return false;
12070 }
12071 }
12072
12073 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12074 need not be 100% accurate; simply be conservative and return true if we
12075 don't know. This is used only to avoid stupidly generating extra code.
12076 If we're wrong, we'll just delete the extra code later. */
12077
12078 bool
12079 block_may_fallthru (const_tree block)
12080 {
12081 /* This CONST_CAST is okay because expr_last returns its argument
12082 unmodified and we assign it to a const_tree. */
12083 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12084
12085 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12086 {
12087 case GOTO_EXPR:
12088 case RETURN_EXPR:
12089 /* Easy cases. If the last statement of the block implies
12090 control transfer, then we can't fall through. */
12091 return false;
12092
12093 case SWITCH_EXPR:
12094 /* If SWITCH_LABELS is set, this is lowered, and represents a
12095 branch to a selected label and hence cannot fall through.
12096 Otherwise SWITCH_BODY is set, and the switch can fall
12097 through. */
12098 return SWITCH_LABELS (stmt) == NULL_TREE;
12099
12100 case COND_EXPR:
12101 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12102 return true;
12103 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12104
12105 case BIND_EXPR:
12106 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12107
12108 case TRY_CATCH_EXPR:
12109 return try_catch_may_fallthru (stmt);
12110
12111 case TRY_FINALLY_EXPR:
12112 /* The finally clause is always executed after the try clause,
12113 so if it does not fall through, then the try-finally will not
12114 fall through. Otherwise, if the try clause does not fall
12115 through, then when the finally clause falls through it will
12116 resume execution wherever the try clause was going. So the
12117 whole try-finally will only fall through if both the try
12118 clause and the finally clause fall through. */
12119 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12120 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12121
12122 case MODIFY_EXPR:
12123 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12124 stmt = TREE_OPERAND (stmt, 1);
12125 else
12126 return true;
12127 /* FALLTHRU */
12128
12129 case CALL_EXPR:
12130 /* Functions that do not return do not fall through. */
12131 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12132
12133 case CLEANUP_POINT_EXPR:
12134 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12135
12136 case TARGET_EXPR:
12137 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12138
12139 case ERROR_MARK:
12140 return true;
12141
12142 default:
12143 return lang_hooks.block_may_fallthru (stmt);
12144 }
12145 }
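/* Illustrative sketch, not part of the original file and kept out of the
   build: a block whose last statement is a plain RETURN_EXPR cannot fall
   through.  */
#if 0
static void
block_may_fallthru_example (void)
{
  tree ret = build1 (RETURN_EXPR, void_type_node, NULL_TREE);
  gcc_assert (!block_may_fallthru (ret));
}
#endif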
12146
12147 /* True if we are using EH to handle cleanups. */
12148 static bool using_eh_for_cleanups_flag = false;
12149
12150 /* This routine is called from front ends to indicate eh should be used for
12151 cleanups. */
12152 void
12153 using_eh_for_cleanups (void)
12154 {
12155 using_eh_for_cleanups_flag = true;
12156 }
12157
12158 /* Query whether EH is used for cleanups. */
12159 bool
12160 using_eh_for_cleanups_p (void)
12161 {
12162 return using_eh_for_cleanups_flag;
12163 }
12164
12165 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12166 const char *
12167 get_tree_code_name (enum tree_code code)
12168 {
12169 const char *invalid = "<invalid tree code>";
12170
12171 if (code >= MAX_TREE_CODES)
12172 return invalid;
12173
12174 return tree_code_name[code];
12175 }
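/* Illustrative sketch, not part of the original file and kept out of the
   build: the name strings come from tree.def, and out-of-range codes get
   the fallback string.  */
#if 0
static void
tree_code_name_example (void)
{
  const char *ok = get_tree_code_name (INTEGER_CST);	/* "integer_cst" */
  const char *bad = get_tree_code_name ((enum tree_code) MAX_TREE_CODES);
  gcc_assert (strcmp (bad, "<invalid tree code>") == 0);
  (void) ok;
}
#endif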
12176
12177 /* Drops the TREE_OVERFLOW flag from T. */
12178
12179 tree
12180 drop_tree_overflow (tree t)
12181 {
12182 gcc_checking_assert (TREE_OVERFLOW (t));
12183
12184 /* For tree codes with a sharing machinery re-build the result. */
12185 if (TREE_CODE (t) == INTEGER_CST)
12186 return wide_int_to_tree (TREE_TYPE (t), t);
12187
12188 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12189 and drop the flag. */
12190 t = copy_node (t);
12191 TREE_OVERFLOW (t) = 0;
12192 return t;
12193 }
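/* Illustrative sketch, not part of the original file and kept out of the
   build: the usual call pattern when a fold result may carry the overflow
   flag but the caller only wants the value.  CST stands for any constant
   tree in scope.  */
#if 0
static tree
scrub_overflow (tree cst)
{
  if (TREE_OVERFLOW_P (cst))
    cst = drop_tree_overflow (cst);
  return cst;
}
#endif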
12194
12195 /* Given a memory reference expression T, return its base address.
12196 The base address of a memory reference expression is the main
12197 object being referenced. For instance, the base address for
12198 'array[i].fld[j]' is 'array'. You can think of this as stripping
12199 away the offset part from a memory address.
12200
12201 This function calls handled_component_p to strip away all the inner
12202 parts of the memory reference until it reaches the base object. */
12203
12204 tree
12205 get_base_address (tree t)
12206 {
12207 while (handled_component_p (t))
12208 t = TREE_OPERAND (t, 0);
12209
12210 if ((TREE_CODE (t) == MEM_REF
12211 || TREE_CODE (t) == TARGET_MEM_REF)
12212 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12213 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12214
12215 /* ??? Either the alias oracle or all callers need to properly deal
12216 with WITH_SIZE_EXPRs before we can look through those. */
12217 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12218 return NULL_TREE;
12219
12220 return t;
12221 }
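/* Illustrative sketch, not part of the original file and kept out of the
   build: a small predicate built on top of get_base_address, in the style
   many passes use before querying decl flags.  The name
   ref_based_on_decl_p is hypothetical.  */
#if 0
static bool
ref_based_on_decl_p (tree ref)
{
  tree base = get_base_address (ref);
  /* For something like array[i].fld[j], BASE is the VAR_DECL "array".  */
  return base != NULL_TREE && DECL_P (base);
}
#endif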
12222
12223 #include "gt-tree.h"