1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "tm_p.h"
37 #include "function.h"
38 #include "obstack.h"
39 #include "toplev.h" /* get_random_seed */
40 #include "ggc.h"
41 #include "hashtab.h"
42 #include "filenames.h"
43 #include "output.h"
44 #include "target.h"
45 #include "common/common-target.h"
46 #include "langhooks.h"
47 #include "tree-inline.h"
48 #include "tree-iterator.h"
49 #include "basic-block.h"
50 #include "bitmap.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimplify.h"
54 #include "gimple-ssa.h"
55 #include "cgraph.h"
56 #include "tree-phinodes.h"
57 #include "tree-ssanames.h"
58 #include "tree-dfa.h"
59 #include "params.h"
60 #include "pointer-set.h"
61 #include "tree-pass.h"
62 #include "langhooks-def.h"
63 #include "diagnostic.h"
64 #include "tree-diagnostic.h"
65 #include "tree-pretty-print.h"
66 #include "except.h"
67 #include "debug.h"
68 #include "intl.h"
69
70 /* Tree code classes. */
71
72 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
73 #define END_OF_BASE_TREE_CODES tcc_exceptional,
74
75 const enum tree_code_class tree_code_type[] = {
76 #include "all-tree.def"
77 };
78
79 #undef DEFTREECODE
80 #undef END_OF_BASE_TREE_CODES
81
82 /* Table indexed by tree code giving number of expression
83 operands beyond the fixed part of the node structure.
84 Not used for types or decls. */
85
86 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
87 #define END_OF_BASE_TREE_CODES 0,
88
89 const unsigned char tree_code_length[] = {
90 #include "all-tree.def"
91 };
92
93 #undef DEFTREECODE
94 #undef END_OF_BASE_TREE_CODES
95
96 /* Names of tree components.
97 Used for printing out the tree and error messages. */
98 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
99 #define END_OF_BASE_TREE_CODES "@dummy",
100
101 static const char *const tree_code_name[] = {
102 #include "all-tree.def"
103 };
104
105 #undef DEFTREECODE
106 #undef END_OF_BASE_TREE_CODES
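/* Illustrative example (not part of the original source): every DEFTREECODE
   entry in all-tree.def feeds the three tables above.  For instance, the
   tree.def entry

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes tcc_binary to tree_code_type[], 2 to tree_code_length[]
   and "plus_expr" to tree_code_name[].  */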
107
108 /* Each tree code class has an associated string representation.
109 These must correspond to the tree_code_class entries. */
110
111 const char *const tree_code_class_strings[] =
112 {
113 "exceptional",
114 "constant",
115 "type",
116 "declaration",
117 "reference",
118 "comparison",
119 "unary",
120 "binary",
121 "statement",
122 "vl_exp",
123 "expression"
124 };
125
126 /* obstack.[ch] explicitly declined to prototype this. */
127 extern int _obstack_allocated_p (struct obstack *h, void *obj);
128
129 /* Statistics-gathering stuff. */
130
131 static int tree_code_counts[MAX_TREE_CODES];
132 int tree_node_counts[(int) all_kinds];
133 int tree_node_sizes[(int) all_kinds];
134
135 /* Keep in sync with tree.h:enum tree_node_kind. */
136 static const char * const tree_node_kind_names[] = {
137 "decls",
138 "types",
139 "blocks",
140 "stmts",
141 "refs",
142 "exprs",
143 "constants",
144 "identifiers",
145 "vecs",
146 "binfos",
147 "ssa names",
148 "constructors",
149 "random kinds",
150 "lang_decl kinds",
151 "lang_type kinds",
152 "omp clauses",
153 };
154
155 /* Unique id for next decl created. */
156 static GTY(()) int next_decl_uid;
157 /* Unique id for next type created. */
158 static GTY(()) int next_type_uid = 1;
159 /* Unique id for next debug decl created. Use negative numbers,
160 to catch erroneous uses. */
161 static GTY(()) int next_debug_decl_uid;
162
163 /* Since we cannot rehash a type after it is in the table, we have to
164 keep the hash code. */
165
166 struct GTY(()) type_hash {
167 unsigned long hash;
168 tree type;
169 };
170
171 /* Initial size of the hash table (rounded to next prime). */
172 #define TYPE_HASH_INITIAL_SIZE 1000
173
174 /* Now here is the hash table. When recording a type, it is added to
175 the slot whose index is the hash code. Note that the hash table is
176 used for several kinds of types (function types, array types and
177 array index range types, for now). While all these live in the
178 same table, they are completely independent, and the hash code is
179 computed differently for each of these. */
180
181 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
182 htab_t type_hash_table;
183
184 /* Hash table and temporary node for larger integer const values. */
185 static GTY (()) tree int_cst_node;
186 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
187 htab_t int_cst_hash_table;
188
189 /* Hash table for optimization flags and target option flags. Use the same
190 hash table for both sets of options. Nodes for building the current
191 optimization and target option nodes. The assumption is most of the time
192 the options created will already be in the hash table, so we avoid
193 allocating and freeing up a node repeatedly. */
194 static GTY (()) tree cl_optimization_node;
195 static GTY (()) tree cl_target_option_node;
196 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
197 htab_t cl_option_hash_table;
198
199 /* General tree->tree mapping structure for use in hash tables. */
200
201
202 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
203 htab_t debug_expr_for_decl;
204
205 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
206 htab_t value_expr_for_decl;
207
208 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
209 htab_t debug_args_for_decl;
210
211 static GTY ((if_marked ("tree_priority_map_marked_p"),
212 param_is (struct tree_priority_map)))
213 htab_t init_priority_for_decl;
214
215 static void set_type_quals (tree, int);
216 static int type_hash_eq (const void *, const void *);
217 static hashval_t type_hash_hash (const void *);
218 static hashval_t int_cst_hash_hash (const void *);
219 static int int_cst_hash_eq (const void *, const void *);
220 static hashval_t cl_option_hash_hash (const void *);
221 static int cl_option_hash_eq (const void *, const void *);
222 static void print_type_hash_statistics (void);
223 static void print_debug_expr_statistics (void);
224 static void print_value_expr_statistics (void);
225 static int type_hash_marked_p (const void *);
226 static unsigned int type_hash_list (const_tree, hashval_t);
227 static unsigned int attribute_hash_list (const_tree, hashval_t);
228 static bool decls_same_for_odr (tree decl1, tree decl2);
229
230 tree global_trees[TI_MAX];
231 tree integer_types[itk_none];
232
233 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
234
235 /* Number of operands for each OpenMP clause. */
236 unsigned const char omp_clause_num_ops[] =
237 {
238 0, /* OMP_CLAUSE_ERROR */
239 1, /* OMP_CLAUSE_PRIVATE */
240 1, /* OMP_CLAUSE_SHARED */
241 1, /* OMP_CLAUSE_FIRSTPRIVATE */
242 2, /* OMP_CLAUSE_LASTPRIVATE */
243 4, /* OMP_CLAUSE_REDUCTION */
244 1, /* OMP_CLAUSE_COPYIN */
245 1, /* OMP_CLAUSE_COPYPRIVATE */
246 2, /* OMP_CLAUSE_LINEAR */
247 2, /* OMP_CLAUSE_ALIGNED */
248 1, /* OMP_CLAUSE_DEPEND */
249 1, /* OMP_CLAUSE_UNIFORM */
250 2, /* OMP_CLAUSE_FROM */
251 2, /* OMP_CLAUSE_TO */
252 2, /* OMP_CLAUSE_MAP */
253 1, /* OMP_CLAUSE__LOOPTEMP_ */
254 1, /* OMP_CLAUSE_IF */
255 1, /* OMP_CLAUSE_NUM_THREADS */
256 1, /* OMP_CLAUSE_SCHEDULE */
257 0, /* OMP_CLAUSE_NOWAIT */
258 0, /* OMP_CLAUSE_ORDERED */
259 0, /* OMP_CLAUSE_DEFAULT */
260 3, /* OMP_CLAUSE_COLLAPSE */
261 0, /* OMP_CLAUSE_UNTIED */
262 1, /* OMP_CLAUSE_FINAL */
263 0, /* OMP_CLAUSE_MERGEABLE */
264 1, /* OMP_CLAUSE_DEVICE */
265 1, /* OMP_CLAUSE_DIST_SCHEDULE */
266 0, /* OMP_CLAUSE_INBRANCH */
267 0, /* OMP_CLAUSE_NOTINBRANCH */
268 1, /* OMP_CLAUSE_NUM_TEAMS */
269 1, /* OMP_CLAUSE_THREAD_LIMIT */
270 0, /* OMP_CLAUSE_PROC_BIND */
271 1, /* OMP_CLAUSE_SAFELEN */
272 1, /* OMP_CLAUSE_SIMDLEN */
273 0, /* OMP_CLAUSE_FOR */
274 0, /* OMP_CLAUSE_PARALLEL */
275 0, /* OMP_CLAUSE_SECTIONS */
276 0, /* OMP_CLAUSE_TASKGROUP */
277 1, /* OMP_CLAUSE__SIMDUID_ */
278 };
279
280 const char * const omp_clause_code_name[] =
281 {
282 "error_clause",
283 "private",
284 "shared",
285 "firstprivate",
286 "lastprivate",
287 "reduction",
288 "copyin",
289 "copyprivate",
290 "linear",
291 "aligned",
292 "depend",
293 "uniform",
294 "from",
295 "to",
296 "map",
297 "_looptemp_",
298 "if",
299 "num_threads",
300 "schedule",
301 "nowait",
302 "ordered",
303 "default",
304 "collapse",
305 "untied",
306 "final",
307 "mergeable",
308 "device",
309 "dist_schedule",
310 "inbranch",
311 "notinbranch",
312 "num_teams",
313 "thread_limit",
314 "proc_bind",
315 "safelen",
316 "simdlen",
317 "for",
318 "parallel",
319 "sections",
320 "taskgroup",
321 "_simduid_"
322 };
323
324
325 /* Return the tree node structure used by tree code CODE. */
326
327 static inline enum tree_node_structure_enum
328 tree_node_structure_for_code (enum tree_code code)
329 {
330 switch (TREE_CODE_CLASS (code))
331 {
332 case tcc_declaration:
333 {
334 switch (code)
335 {
336 case FIELD_DECL:
337 return TS_FIELD_DECL;
338 case PARM_DECL:
339 return TS_PARM_DECL;
340 case VAR_DECL:
341 return TS_VAR_DECL;
342 case LABEL_DECL:
343 return TS_LABEL_DECL;
344 case RESULT_DECL:
345 return TS_RESULT_DECL;
346 case DEBUG_EXPR_DECL:
347 return TS_DECL_WRTL;
348 case CONST_DECL:
349 return TS_CONST_DECL;
350 case TYPE_DECL:
351 return TS_TYPE_DECL;
352 case FUNCTION_DECL:
353 return TS_FUNCTION_DECL;
354 case TRANSLATION_UNIT_DECL:
355 return TS_TRANSLATION_UNIT_DECL;
356 default:
357 return TS_DECL_NON_COMMON;
358 }
359 }
360 case tcc_type:
361 return TS_TYPE_NON_COMMON;
362 case tcc_reference:
363 case tcc_comparison:
364 case tcc_unary:
365 case tcc_binary:
366 case tcc_expression:
367 case tcc_statement:
368 case tcc_vl_exp:
369 return TS_EXP;
370 default: /* tcc_constant and tcc_exceptional */
371 break;
372 }
373 switch (code)
374 {
375 /* tcc_constant cases. */
376 case INTEGER_CST: return TS_INT_CST;
377 case REAL_CST: return TS_REAL_CST;
378 case FIXED_CST: return TS_FIXED_CST;
379 case COMPLEX_CST: return TS_COMPLEX;
380 case VECTOR_CST: return TS_VECTOR;
381 case STRING_CST: return TS_STRING;
382 /* tcc_exceptional cases. */
383 case ERROR_MARK: return TS_COMMON;
384 case IDENTIFIER_NODE: return TS_IDENTIFIER;
385 case TREE_LIST: return TS_LIST;
386 case TREE_VEC: return TS_VEC;
387 case SSA_NAME: return TS_SSA_NAME;
388 case PLACEHOLDER_EXPR: return TS_COMMON;
389 case STATEMENT_LIST: return TS_STATEMENT_LIST;
390 case BLOCK: return TS_BLOCK;
391 case CONSTRUCTOR: return TS_CONSTRUCTOR;
392 case TREE_BINFO: return TS_BINFO;
393 case OMP_CLAUSE: return TS_OMP_CLAUSE;
394 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
395 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
396
397 default:
398 gcc_unreachable ();
399 }
400 }
401
402
403 /* Initialize tree_contains_struct to describe the hierarchy of tree
404 nodes. */
405
406 static void
407 initialize_tree_contains_struct (void)
408 {
409 unsigned i;
410
411 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
412 {
413 enum tree_code code;
414 enum tree_node_structure_enum ts_code;
415
416 code = (enum tree_code) i;
417 ts_code = tree_node_structure_for_code (code);
418
419 /* Mark the TS structure itself. */
420 tree_contains_struct[code][ts_code] = 1;
421
422 /* Mark all the structures that TS is derived from. */
423 switch (ts_code)
424 {
425 case TS_TYPED:
426 case TS_BLOCK:
427 MARK_TS_BASE (code);
428 break;
429
430 case TS_COMMON:
431 case TS_INT_CST:
432 case TS_REAL_CST:
433 case TS_FIXED_CST:
434 case TS_VECTOR:
435 case TS_STRING:
436 case TS_COMPLEX:
437 case TS_SSA_NAME:
438 case TS_CONSTRUCTOR:
439 case TS_EXP:
440 case TS_STATEMENT_LIST:
441 MARK_TS_TYPED (code);
442 break;
443
444 case TS_IDENTIFIER:
445 case TS_DECL_MINIMAL:
446 case TS_TYPE_COMMON:
447 case TS_LIST:
448 case TS_VEC:
449 case TS_BINFO:
450 case TS_OMP_CLAUSE:
451 case TS_OPTIMIZATION:
452 case TS_TARGET_OPTION:
453 MARK_TS_COMMON (code);
454 break;
455
456 case TS_TYPE_WITH_LANG_SPECIFIC:
457 MARK_TS_TYPE_COMMON (code);
458 break;
459
460 case TS_TYPE_NON_COMMON:
461 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
462 break;
463
464 case TS_DECL_COMMON:
465 MARK_TS_DECL_MINIMAL (code);
466 break;
467
468 case TS_DECL_WRTL:
469 case TS_CONST_DECL:
470 MARK_TS_DECL_COMMON (code);
471 break;
472
473 case TS_DECL_NON_COMMON:
474 MARK_TS_DECL_WITH_VIS (code);
475 break;
476
477 case TS_DECL_WITH_VIS:
478 case TS_PARM_DECL:
479 case TS_LABEL_DECL:
480 case TS_RESULT_DECL:
481 MARK_TS_DECL_WRTL (code);
482 break;
483
484 case TS_FIELD_DECL:
485 MARK_TS_DECL_COMMON (code);
486 break;
487
488 case TS_VAR_DECL:
489 MARK_TS_DECL_WITH_VIS (code);
490 break;
491
492 case TS_TYPE_DECL:
493 case TS_FUNCTION_DECL:
494 MARK_TS_DECL_NON_COMMON (code);
495 break;
496
497 case TS_TRANSLATION_UNIT_DECL:
498 MARK_TS_DECL_COMMON (code);
499 break;
500
501 default:
502 gcc_unreachable ();
503 }
504 }
505
506 /* Basic consistency checks for attributes used in fold. */
507 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
508 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
509 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
510 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
511 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
512 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
513 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
514 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
515 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
516 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
519 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
520 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
521 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
522 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
523 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
524 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
525 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
526 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
527 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
528 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
529 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
530 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
533 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
534 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
535 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
536 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
537 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
538 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
539 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
540 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
542 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
543 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
544 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
545 }
546
547
548 /* Init tree.c. */
549
550 void
551 init_ttree (void)
552 {
553 /* Initialize the hash table of types. */
554 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
555 type_hash_eq, 0);
556
557 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
558 tree_decl_map_eq, 0);
559
560 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
561 tree_decl_map_eq, 0);
562 init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
563 tree_priority_map_eq, 0);
564
565 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
566 int_cst_hash_eq, NULL);
567
568 int_cst_node = make_node (INTEGER_CST);
569
570 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
571 cl_option_hash_eq, NULL);
572
573 cl_optimization_node = make_node (OPTIMIZATION_NODE);
574 cl_target_option_node = make_node (TARGET_OPTION_NODE);
575
576 /* Initialize the tree_contains_struct array. */
577 initialize_tree_contains_struct ();
578 lang_hooks.init_ts ();
579 }
580
581 \f
582 /* The name of the object as the assembler will see it (but before any
583 translations made by ASM_OUTPUT_LABELREF). Often this is the same
584 as DECL_NAME. It is an IDENTIFIER_NODE. */
585 tree
586 decl_assembler_name (tree decl)
587 {
588 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
589 lang_hooks.set_decl_assembler_name (decl);
590 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
591 }
592
593 /* Compute the number of bytes occupied by a tree with code CODE.
594 This function cannot be used for nodes that have variable sizes,
595 including TREE_VEC, STRING_CST, and CALL_EXPR. */
596 size_t
597 tree_code_size (enum tree_code code)
598 {
599 switch (TREE_CODE_CLASS (code))
600 {
601 case tcc_declaration: /* A decl node */
602 {
603 switch (code)
604 {
605 case FIELD_DECL:
606 return sizeof (struct tree_field_decl);
607 case PARM_DECL:
608 return sizeof (struct tree_parm_decl);
609 case VAR_DECL:
610 return sizeof (struct tree_var_decl);
611 case LABEL_DECL:
612 return sizeof (struct tree_label_decl);
613 case RESULT_DECL:
614 return sizeof (struct tree_result_decl);
615 case CONST_DECL:
616 return sizeof (struct tree_const_decl);
617 case TYPE_DECL:
618 return sizeof (struct tree_type_decl);
619 case FUNCTION_DECL:
620 return sizeof (struct tree_function_decl);
621 case DEBUG_EXPR_DECL:
622 return sizeof (struct tree_decl_with_rtl);
623 default:
624 return sizeof (struct tree_decl_non_common);
625 }
626 }
627
628 case tcc_type: /* a type node */
629 return sizeof (struct tree_type_non_common);
630
631 case tcc_reference: /* a reference */
632 case tcc_expression: /* an expression */
633 case tcc_statement: /* an expression with side effects */
634 case tcc_comparison: /* a comparison expression */
635 case tcc_unary: /* a unary arithmetic expression */
636 case tcc_binary: /* a binary arithmetic expression */
637 return (sizeof (struct tree_exp)
638 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
639
640 case tcc_constant: /* a constant */
641 switch (code)
642 {
643 case INTEGER_CST: return sizeof (struct tree_int_cst);
644 case REAL_CST: return sizeof (struct tree_real_cst);
645 case FIXED_CST: return sizeof (struct tree_fixed_cst);
646 case COMPLEX_CST: return sizeof (struct tree_complex);
647 case VECTOR_CST: return sizeof (struct tree_vector);
648 case STRING_CST: gcc_unreachable ();
649 default:
650 return lang_hooks.tree_size (code);
651 }
652
653 case tcc_exceptional: /* something random, like an identifier. */
654 switch (code)
655 {
656 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
657 case TREE_LIST: return sizeof (struct tree_list);
658
659 case ERROR_MARK:
660 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
661
662 case TREE_VEC:
663 case OMP_CLAUSE: gcc_unreachable ();
664
665 case SSA_NAME: return sizeof (struct tree_ssa_name);
666
667 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
668 case BLOCK: return sizeof (struct tree_block);
669 case CONSTRUCTOR: return sizeof (struct tree_constructor);
670 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
671 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
672
673 default:
674 return lang_hooks.tree_size (code);
675 }
676
677 default:
678 gcc_unreachable ();
679 }
680 }
681
682 /* Compute the number of bytes occupied by NODE. This routine only
683 looks at TREE_CODE, except for those nodes that have variable sizes. */
684 size_t
685 tree_size (const_tree node)
686 {
687 const enum tree_code code = TREE_CODE (node);
688 switch (code)
689 {
690 case TREE_BINFO:
691 return (offsetof (struct tree_binfo, base_binfos)
692 + vec<tree, va_gc>
693 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
694
695 case TREE_VEC:
696 return (sizeof (struct tree_vec)
697 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
698
699 case VECTOR_CST:
700 return (sizeof (struct tree_vector)
701 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
702
703 case STRING_CST:
704 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
705
706 case OMP_CLAUSE:
707 return (sizeof (struct tree_omp_clause)
708 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
709 * sizeof (tree));
710
711 default:
712 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
713 return (sizeof (struct tree_exp)
714 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
715 else
716 return tree_code_size (code);
717 }
718 }
719
720 /* Record interesting allocation statistics for a tree node with CODE
721 and LENGTH. */
722
723 static void
724 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
725 size_t length ATTRIBUTE_UNUSED)
726 {
727 enum tree_code_class type = TREE_CODE_CLASS (code);
728 tree_node_kind kind;
729
730 if (!GATHER_STATISTICS)
731 return;
732
733 switch (type)
734 {
735 case tcc_declaration: /* A decl node */
736 kind = d_kind;
737 break;
738
739 case tcc_type: /* a type node */
740 kind = t_kind;
741 break;
742
743 case tcc_statement: /* an expression with side effects */
744 kind = s_kind;
745 break;
746
747 case tcc_reference: /* a reference */
748 kind = r_kind;
749 break;
750
751 case tcc_expression: /* an expression */
752 case tcc_comparison: /* a comparison expression */
753 case tcc_unary: /* a unary arithmetic expression */
754 case tcc_binary: /* a binary arithmetic expression */
755 kind = e_kind;
756 break;
757
758 case tcc_constant: /* a constant */
759 kind = c_kind;
760 break;
761
762 case tcc_exceptional: /* something random, like an identifier. */
763 switch (code)
764 {
765 case IDENTIFIER_NODE:
766 kind = id_kind;
767 break;
768
769 case TREE_VEC:
770 kind = vec_kind;
771 break;
772
773 case TREE_BINFO:
774 kind = binfo_kind;
775 break;
776
777 case SSA_NAME:
778 kind = ssa_name_kind;
779 break;
780
781 case BLOCK:
782 kind = b_kind;
783 break;
784
785 case CONSTRUCTOR:
786 kind = constr_kind;
787 break;
788
789 case OMP_CLAUSE:
790 kind = omp_clause_kind;
791 break;
792
793 default:
794 kind = x_kind;
795 break;
796 }
797 break;
798
799 case tcc_vl_exp:
800 kind = e_kind;
801 break;
802
803 default:
804 gcc_unreachable ();
805 }
806
807 tree_code_counts[(int) code]++;
808 tree_node_counts[(int) kind]++;
809 tree_node_sizes[(int) kind] += length;
810 }
811
812 /* Allocate and return a new UID from the DECL_UID namespace. */
813
814 int
815 allocate_decl_uid (void)
816 {
817 return next_decl_uid++;
818 }
819
820 /* Return a newly allocated node of code CODE. For decl and type
821 nodes, some other fields are initialized. The rest of the node is
822 initialized to zero. This function cannot be used for TREE_VEC or
823 OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size.
824
825 Achoo! I got a code in the node. */
826
827 tree
828 make_node_stat (enum tree_code code MEM_STAT_DECL)
829 {
830 tree t;
831 enum tree_code_class type = TREE_CODE_CLASS (code);
832 size_t length = tree_code_size (code);
833
834 record_node_allocation_statistics (code, length);
835
836 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
837 TREE_SET_CODE (t, code);
838
839 switch (type)
840 {
841 case tcc_statement:
842 TREE_SIDE_EFFECTS (t) = 1;
843 break;
844
845 case tcc_declaration:
846 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
847 {
848 if (code == FUNCTION_DECL)
849 {
850 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
851 DECL_MODE (t) = FUNCTION_MODE;
852 }
853 else
854 DECL_ALIGN (t) = 1;
855 }
856 DECL_SOURCE_LOCATION (t) = input_location;
857 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
858 DECL_UID (t) = --next_debug_decl_uid;
859 else
860 {
861 DECL_UID (t) = allocate_decl_uid ();
862 SET_DECL_PT_UID (t, -1);
863 }
864 if (TREE_CODE (t) == LABEL_DECL)
865 LABEL_DECL_UID (t) = -1;
866
867 break;
868
869 case tcc_type:
870 TYPE_UID (t) = next_type_uid++;
871 TYPE_ALIGN (t) = BITS_PER_UNIT;
872 TYPE_USER_ALIGN (t) = 0;
873 TYPE_MAIN_VARIANT (t) = t;
874 TYPE_CANONICAL (t) = t;
875
876 /* Default to no attributes for type, but let target change that. */
877 TYPE_ATTRIBUTES (t) = NULL_TREE;
878 targetm.set_default_type_attributes (t);
879
880 /* We have not yet computed the alias set for this type. */
881 TYPE_ALIAS_SET (t) = -1;
882 break;
883
884 case tcc_constant:
885 TREE_CONSTANT (t) = 1;
886 break;
887
888 case tcc_expression:
889 switch (code)
890 {
891 case INIT_EXPR:
892 case MODIFY_EXPR:
893 case VA_ARG_EXPR:
894 case PREDECREMENT_EXPR:
895 case PREINCREMENT_EXPR:
896 case POSTDECREMENT_EXPR:
897 case POSTINCREMENT_EXPR:
898 /* All of these have side-effects, no matter what their
899 operands are. */
900 TREE_SIDE_EFFECTS (t) = 1;
901 break;
902
903 default:
904 break;
905 }
906 break;
907
908 default:
909 /* Other classes need no special treatment. */
910 break;
911 }
912
913 return t;
914 }
915 \f
916 /* Return a new node with the same contents as NODE except that its
917 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
918
919 tree
920 copy_node_stat (tree node MEM_STAT_DECL)
921 {
922 tree t;
923 enum tree_code code = TREE_CODE (node);
924 size_t length;
925
926 gcc_assert (code != STATEMENT_LIST);
927
928 length = tree_size (node);
929 record_node_allocation_statistics (code, length);
930 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
931 memcpy (t, node, length);
932
933 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
934 TREE_CHAIN (t) = 0;
935 TREE_ASM_WRITTEN (t) = 0;
936 TREE_VISITED (t) = 0;
937
938 if (TREE_CODE_CLASS (code) == tcc_declaration)
939 {
940 if (code == DEBUG_EXPR_DECL)
941 DECL_UID (t) = --next_debug_decl_uid;
942 else
943 {
944 DECL_UID (t) = allocate_decl_uid ();
945 if (DECL_PT_UID_SET_P (node))
946 SET_DECL_PT_UID (t, DECL_PT_UID (node));
947 }
948 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
949 && DECL_HAS_VALUE_EXPR_P (node))
950 {
951 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
952 DECL_HAS_VALUE_EXPR_P (t) = 1;
953 }
954 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
955 if (TREE_CODE (node) == VAR_DECL)
956 DECL_HAS_DEBUG_EXPR_P (t) = 0;
957 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
958 {
959 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
960 DECL_HAS_INIT_PRIORITY_P (t) = 1;
961 }
962 if (TREE_CODE (node) == FUNCTION_DECL)
963 DECL_STRUCT_FUNCTION (t) = NULL;
964 }
965 else if (TREE_CODE_CLASS (code) == tcc_type)
966 {
967 TYPE_UID (t) = next_type_uid++;
968 /* The following is so that the debug code for
969 the copy is different from the original type.
970 The two statements usually duplicate each other
971 (because they clear fields of the same union),
972 but the optimizer should catch that. */
973 TYPE_SYMTAB_POINTER (t) = 0;
974 TYPE_SYMTAB_ADDRESS (t) = 0;
975
976 /* Do not copy the values cache. */
977 if (TYPE_CACHED_VALUES_P (t))
978 {
979 TYPE_CACHED_VALUES_P (t) = 0;
980 TYPE_CACHED_VALUES (t) = NULL_TREE;
981 }
982 }
983
984 return t;
985 }
986
987 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
988 For example, this can copy a list made of TREE_LIST nodes. */
989
990 tree
991 copy_list (tree list)
992 {
993 tree head;
994 tree prev, next;
995
996 if (list == 0)
997 return 0;
998
999 head = prev = copy_node (list);
1000 next = TREE_CHAIN (list);
1001 while (next)
1002 {
1003 TREE_CHAIN (prev) = copy_node (next);
1004 prev = TREE_CHAIN (prev);
1005 next = TREE_CHAIN (next);
1006 }
1007 return head;
1008 }
1009
1010 \f
1011 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1012
1013 tree
1014 build_int_cst (tree type, HOST_WIDE_INT low)
1015 {
1016 /* Support legacy code. */
1017 if (!type)
1018 type = integer_type_node;
1019
1020 return double_int_to_tree (type, double_int::from_shwi (low));
1021 }
1022
1023 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1024
1025 tree
1026 build_int_cst_type (tree type, HOST_WIDE_INT low)
1027 {
1028 gcc_assert (type);
1029
1030 return double_int_to_tree (type, double_int::from_shwi (low));
1031 }
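/* Usage sketch (illustrative only, not part of the original source): both
   entry points yield a shared INTEGER_CST whose value is extended to the
   precision of the given type.

     tree five      = build_int_cst (integer_type_node, 5);
     tree minus_one = build_int_cst_type (ssizetype, -1);
*/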
1032
1033 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1034 of CST is assumed to be the same as the signedness of TYPE. */
1035
1036 tree
1037 double_int_to_tree (tree type, double_int cst)
1038 {
1039 bool sign_extended_type = !TYPE_UNSIGNED (type);
1040
1041 cst = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
1042
1043 return build_int_cst_wide (type, cst.low, cst.high);
1044 }
1045
1046 /* Returns true if CST fits into range of TYPE. Signedness of CST is assumed
1047 to be the same as the signedness of TYPE. */
1048
1049 bool
1050 double_int_fits_to_tree_p (const_tree type, double_int cst)
1051 {
1052 bool sign_extended_type = !TYPE_UNSIGNED (type);
1053
1054 double_int ext
1055 = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
1056
1057 return cst == ext;
1058 }
1059
1060 /* We force the double_int CST to the range of the type TYPE by sign or
1061 zero extending it. OVERFLOWABLE indicates if we are interested in
1062 overflow of the value; when >0 we are only interested in signed
1063 overflow, for <0 we are interested in any overflow. OVERFLOWED
1064 indicates whether overflow has already occurred. We force
1065 CST's value to be within the range of TYPE (by setting to 0 or 1 all
1066 the bits outside the type's range). We set TREE_OVERFLOW on the
1067 result if
1068 OVERFLOWED is nonzero,
1069 or OVERFLOWABLE is >0 and signed overflow occurs,
1070 or OVERFLOWABLE is <0 and any overflow occurs.
1071 We return a new tree node for the extended double_int. The node
1072 is shared if no overflow flags are set. */
1073
1074
1075 tree
1076 force_fit_type_double (tree type, double_int cst, int overflowable,
1077 bool overflowed)
1078 {
1079 bool sign_extended_type = !TYPE_UNSIGNED (type);
1080
1081 /* If we need to set overflow flags, return a new unshared node. */
1082 if (overflowed || !double_int_fits_to_tree_p (type, cst))
1083 {
1084 if (overflowed
1085 || overflowable < 0
1086 || (overflowable > 0 && sign_extended_type))
1087 {
1088 tree t = make_node (INTEGER_CST);
1089 TREE_INT_CST (t)
1090 = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
1091 TREE_TYPE (t) = type;
1092 TREE_OVERFLOW (t) = 1;
1093 return t;
1094 }
1095 }
1096
1097 /* Else build a shared node. */
1098 return double_int_to_tree (type, cst);
1099 }
1100
1101 /* These are the hash table functions for the hash table of INTEGER_CST
1102 nodes of a sizetype. */
1103
1104 /* Return the hash code of X, an INTEGER_CST. */
1105
1106 static hashval_t
1107 int_cst_hash_hash (const void *x)
1108 {
1109 const_tree const t = (const_tree) x;
1110
1111 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1112 ^ htab_hash_pointer (TREE_TYPE (t)));
1113 }
1114
1115 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1116 is the same as that given by *Y, also an INTEGER_CST tree node. */
1117
1118 static int
1119 int_cst_hash_eq (const void *x, const void *y)
1120 {
1121 const_tree const xt = (const_tree) x;
1122 const_tree const yt = (const_tree) y;
1123
1124 return (TREE_TYPE (xt) == TREE_TYPE (yt)
1125 && TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1126 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt));
1127 }
1128
1129 /* Create an INT_CST node of TYPE and value HI:LOW.
1130 The returned node is always shared. For small integers we use a
1131 per-type vector cache, for larger ones we use a single hash table. */
1132
1133 tree
1134 build_int_cst_wide (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
1135 {
1136 tree t;
1137 int ix = -1;
1138 int limit = 0;
1139
1140 gcc_assert (type);
1141
1142 switch (TREE_CODE (type))
1143 {
1144 case NULLPTR_TYPE:
1145 gcc_assert (hi == 0 && low == 0);
1146 /* Fallthru. */
1147
1148 case POINTER_TYPE:
1149 case REFERENCE_TYPE:
1150 case POINTER_BOUNDS_TYPE:
1151 /* Cache NULL pointer and zero bounds. */
1152 if (!hi && !low)
1153 {
1154 limit = 1;
1155 ix = 0;
1156 }
1157 break;
1158
1159 case BOOLEAN_TYPE:
1160 /* Cache false or true. */
1161 limit = 2;
1162 if (!hi && low < 2)
1163 ix = low;
1164 break;
1165
1166 case INTEGER_TYPE:
1167 case OFFSET_TYPE:
1168 if (TYPE_UNSIGNED (type))
1169 {
1170 /* Cache 0..N */
1171 limit = INTEGER_SHARE_LIMIT;
1172 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1173 ix = low;
1174 }
1175 else
1176 {
1177 /* Cache -1..N */
1178 limit = INTEGER_SHARE_LIMIT + 1;
1179 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1180 ix = low + 1;
1181 else if (hi == -1 && low == -(unsigned HOST_WIDE_INT)1)
1182 ix = 0;
1183 }
1184 break;
1185
1186 case ENUMERAL_TYPE:
1187 break;
1188
1189 default:
1190 gcc_unreachable ();
1191 }
1192
1193 if (ix >= 0)
1194 {
1195 /* Look for it in the type's vector of small shared ints. */
1196 if (!TYPE_CACHED_VALUES_P (type))
1197 {
1198 TYPE_CACHED_VALUES_P (type) = 1;
1199 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1200 }
1201
1202 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1203 if (t)
1204 {
1205 /* Make sure no one is clobbering the shared constant. */
1206 gcc_assert (TREE_TYPE (t) == type);
1207 gcc_assert (TREE_INT_CST_LOW (t) == low);
1208 gcc_assert (TREE_INT_CST_HIGH (t) == hi);
1209 }
1210 else
1211 {
1212 /* Create a new shared int. */
1213 t = make_node (INTEGER_CST);
1214
1215 TREE_INT_CST_LOW (t) = low;
1216 TREE_INT_CST_HIGH (t) = hi;
1217 TREE_TYPE (t) = type;
1218
1219 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1220 }
1221 }
1222 else
1223 {
1224 /* Use the cache of larger shared ints. */
1225 void **slot;
1226
1227 TREE_INT_CST_LOW (int_cst_node) = low;
1228 TREE_INT_CST_HIGH (int_cst_node) = hi;
1229 TREE_TYPE (int_cst_node) = type;
1230
1231 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1232 t = (tree) *slot;
1233 if (!t)
1234 {
1235 /* Insert this one into the hash table. */
1236 t = int_cst_node;
1237 *slot = t;
1238 /* Make a new node for next time round. */
1239 int_cst_node = make_node (INTEGER_CST);
1240 }
1241 }
1242
1243 return t;
1244 }
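/* Sharing sketch (illustrative only, not part of the original source):
   small constants are cached per type, so building the same value twice
   returns the same node.

     tree a = build_int_cst_wide (integer_type_node, 3, 0);
     tree b = build_int_cst_wide (integer_type_node, 3, 0);
     gcc_assert (a == b);   /+ pointer equality; both come from the cache +/

   Values outside the small-integer range are shared through
   int_cst_hash_table instead, with the same guarantee.  */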
1245
1246 void
1247 cache_integer_cst (tree t)
1248 {
1249 tree type = TREE_TYPE (t);
1250 HOST_WIDE_INT hi = TREE_INT_CST_HIGH (t);
1251 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (t);
1252 int ix = -1;
1253 int limit = 0;
1254
1255 gcc_assert (!TREE_OVERFLOW (t));
1256
1257 switch (TREE_CODE (type))
1258 {
1259 case NULLPTR_TYPE:
1260 gcc_assert (hi == 0 && low == 0);
1261 /* Fallthru. */
1262
1263 case POINTER_TYPE:
1264 case REFERENCE_TYPE:
1265 /* Cache NULL pointer. */
1266 if (!hi && !low)
1267 {
1268 limit = 1;
1269 ix = 0;
1270 }
1271 break;
1272
1273 case BOOLEAN_TYPE:
1274 /* Cache false or true. */
1275 limit = 2;
1276 if (!hi && low < 2)
1277 ix = low;
1278 break;
1279
1280 case INTEGER_TYPE:
1281 case OFFSET_TYPE:
1282 if (TYPE_UNSIGNED (type))
1283 {
1284 /* Cache 0..N */
1285 limit = INTEGER_SHARE_LIMIT;
1286 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1287 ix = low;
1288 }
1289 else
1290 {
1291 /* Cache -1..N */
1292 limit = INTEGER_SHARE_LIMIT + 1;
1293 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1294 ix = low + 1;
1295 else if (hi == -1 && low == -(unsigned HOST_WIDE_INT)1)
1296 ix = 0;
1297 }
1298 break;
1299
1300 case ENUMERAL_TYPE:
1301 break;
1302
1303 default:
1304 gcc_unreachable ();
1305 }
1306
1307 if (ix >= 0)
1308 {
1309 /* Look for it in the type's vector of small shared ints. */
1310 if (!TYPE_CACHED_VALUES_P (type))
1311 {
1312 TYPE_CACHED_VALUES_P (type) = 1;
1313 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1314 }
1315
1316 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1317 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1318 }
1319 else
1320 {
1321 /* Use the cache of larger shared ints. */
1322 void **slot;
1323
1324 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1325 /* If there is already an entry for the number verify it's the
1326 same. */
1327 if (*slot)
1328 {
1329 gcc_assert (TREE_INT_CST_LOW ((tree)*slot) == low
1330 && TREE_INT_CST_HIGH ((tree)*slot) == hi);
1331 return;
1332 }
1333 /* Otherwise insert this one into the hash table. */
1334 *slot = t;
1335 }
1336 }
1337
1338
1339 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1340 and the rest are zeros. */
1341
1342 tree
1343 build_low_bits_mask (tree type, unsigned bits)
1344 {
1345 double_int mask;
1346
1347 gcc_assert (bits <= TYPE_PRECISION (type));
1348
1349 if (bits == TYPE_PRECISION (type)
1350 && !TYPE_UNSIGNED (type))
1351 /* Sign extended all-ones mask. */
1352 mask = double_int_minus_one;
1353 else
1354 mask = double_int::mask (bits);
1355
1356 return build_int_cst_wide (type, mask.low, mask.high);
1357 }
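/* Example (illustrative only): build_low_bits_mask (unsigned_type_node, 4)
   yields the constant 0xf, i.e. the low four bits set and all higher bits
   clear.  */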
1358
1359 /* Checks that X is an integer constant that can be expressed in (unsigned)
1360 HOST_WIDE_INT without loss of precision. */
1361
1362 bool
1363 cst_and_fits_in_hwi (const_tree x)
1364 {
1365 if (TREE_CODE (x) != INTEGER_CST)
1366 return false;
1367
1368 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1369 return false;
1370
1371 return (TREE_INT_CST_HIGH (x) == 0
1372 || TREE_INT_CST_HIGH (x) == -1);
1373 }
1374
1375 /* Build a new VECTOR_CST node with room for LEN elements. */
1376
1377 tree
1378 make_vector_stat (unsigned len MEM_STAT_DECL)
1379 {
1380 tree t;
1381 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1382
1383 record_node_allocation_statistics (VECTOR_CST, length);
1384
1385 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1386
1387 TREE_SET_CODE (t, VECTOR_CST);
1388 TREE_CONSTANT (t) = 1;
1389
1390 return t;
1391 }
1392
1393 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1394 are in a list pointed to by VALS. */
1395
1396 tree
1397 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1398 {
1399 int over = 0;
1400 unsigned cnt = 0;
1401 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1402 TREE_TYPE (v) = type;
1403
1404 /* Iterate through elements and check for overflow. */
1405 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1406 {
1407 tree value = vals[cnt];
1408
1409 VECTOR_CST_ELT (v, cnt) = value;
1410
1411 /* Don't crash if we get an address constant. */
1412 if (!CONSTANT_CLASS_P (value))
1413 continue;
1414
1415 over |= TREE_OVERFLOW (value);
1416 }
1417
1418 TREE_OVERFLOW (v) = over;
1419 return v;
1420 }
1421
1422 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1423 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1424
1425 tree
1426 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1427 {
1428 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1429 unsigned HOST_WIDE_INT idx;
1430 tree value;
1431
1432 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1433 vec[idx] = value;
1434 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1435 vec[idx] = build_zero_cst (TREE_TYPE (type));
1436
1437 return build_vector (type, vec);
1438 }
1439
1440 /* Build a vector of type VECTYPE where all the elements are SCs. */
1441 tree
1442 build_vector_from_val (tree vectype, tree sc)
1443 {
1444 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1445
1446 if (sc == error_mark_node)
1447 return sc;
1448
1449 /* Verify that the vector type is suitable for SC. Note that there
1450 is some inconsistency in the type-system with respect to restrict
1451 qualifications of pointers. Vector types always have a main-variant
1452 element type and the qualification is applied to the vector-type.
1453 So TREE_TYPE (vector-type) does not return a properly qualified
1454 vector element-type. */
1455 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1456 TREE_TYPE (vectype)));
1457
1458 if (CONSTANT_CLASS_P (sc))
1459 {
1460 tree *v = XALLOCAVEC (tree, nunits);
1461 for (i = 0; i < nunits; ++i)
1462 v[i] = sc;
1463 return build_vector (vectype, v);
1464 }
1465 else
1466 {
1467 vec<constructor_elt, va_gc> *v;
1468 vec_alloc (v, nunits);
1469 for (i = 0; i < nunits; ++i)
1470 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1471 return build_constructor (vectype, v);
1472 }
1473 }
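/* Usage sketch (illustrative only; VECTYPE is assumed to be an integer
   vector type obtained elsewhere):

     tree seven = build_int_cst (TREE_TYPE (vectype), 7);
     tree splat = build_vector_from_val (vectype, seven);

   Since the scalar is a CONSTANT_CLASS_P node, the result is a VECTOR_CST
   with every element equal to 7; a non-constant scalar would instead
   produce a CONSTRUCTOR.  */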
1474
1475 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1476 are in the vec pointed to by VALS. */
1477 tree
1478 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1479 {
1480 tree c = make_node (CONSTRUCTOR);
1481 unsigned int i;
1482 constructor_elt *elt;
1483 bool constant_p = true;
1484 bool side_effects_p = false;
1485
1486 TREE_TYPE (c) = type;
1487 CONSTRUCTOR_ELTS (c) = vals;
1488
1489 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1490 {
1491 /* Mostly ctors will have elts that don't have side-effects, so
1492 the usual case is to scan all the elements. Hence a single
1493 loop for both const and side effects, rather than one loop
1494 each (with early outs). */
1495 if (!TREE_CONSTANT (elt->value))
1496 constant_p = false;
1497 if (TREE_SIDE_EFFECTS (elt->value))
1498 side_effects_p = true;
1499 }
1500
1501 TREE_SIDE_EFFECTS (c) = side_effects_p;
1502 TREE_CONSTANT (c) = constant_p;
1503
1504 return c;
1505 }
1506
1507 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1508 INDEX and VALUE. */
1509 tree
1510 build_constructor_single (tree type, tree index, tree value)
1511 {
1512 vec<constructor_elt, va_gc> *v;
1513 constructor_elt elt = {index, value};
1514
1515 vec_alloc (v, 1);
1516 v->quick_push (elt);
1517
1518 return build_constructor (type, v);
1519 }
1520
1521
1522 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1523 are in a list pointed to by VALS. */
1524 tree
1525 build_constructor_from_list (tree type, tree vals)
1526 {
1527 tree t;
1528 vec<constructor_elt, va_gc> *v = NULL;
1529
1530 if (vals)
1531 {
1532 vec_alloc (v, list_length (vals));
1533 for (t = vals; t; t = TREE_CHAIN (t))
1534 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1535 }
1536
1537 return build_constructor (type, v);
1538 }
1539
1540 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1541 of elements, provided as index/value pairs. */
1542
1543 tree
1544 build_constructor_va (tree type, int nelts, ...)
1545 {
1546 vec<constructor_elt, va_gc> *v = NULL;
1547 va_list p;
1548
1549 va_start (p, nelts);
1550 vec_alloc (v, nelts);
1551 while (nelts--)
1552 {
1553 tree index = va_arg (p, tree);
1554 tree value = va_arg (p, tree);
1555 CONSTRUCTOR_APPEND_ELT (v, index, value);
1556 }
1557 va_end (p);
1558 return build_constructor (type, v);
1559 }
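/* Usage sketch (illustrative only; array_type, x and y are hypothetical):
   build the initializer { [0] = x, [1] = y }.

     tree ctor = build_constructor_va (array_type, 2,
                                       size_int (0), x,
                                       size_int (1), y);

   Index/value arguments are consumed pairwise.  */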
1560
1561 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1562
1563 tree
1564 build_fixed (tree type, FIXED_VALUE_TYPE f)
1565 {
1566 tree v;
1567 FIXED_VALUE_TYPE *fp;
1568
1569 v = make_node (FIXED_CST);
1570 fp = ggc_alloc_fixed_value ();
1571 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1572
1573 TREE_TYPE (v) = type;
1574 TREE_FIXED_CST_PTR (v) = fp;
1575 return v;
1576 }
1577
1578 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1579
1580 tree
1581 build_real (tree type, REAL_VALUE_TYPE d)
1582 {
1583 tree v;
1584 REAL_VALUE_TYPE *dp;
1585 int overflow = 0;
1586
1587 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1588 Consider doing it via real_convert now. */
1589
1590 v = make_node (REAL_CST);
1591 dp = ggc_alloc_real_value ();
1592 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1593
1594 TREE_TYPE (v) = type;
1595 TREE_REAL_CST_PTR (v) = dp;
1596 TREE_OVERFLOW (v) = overflow;
1597 return v;
1598 }
1599
1600 /* Return a REAL_VALUE_TYPE equal to the integer value of the INTEGER_CST
1601 node I, converted using the mode of TYPE (VOIDmode if TYPE is null). */
1602
1603 REAL_VALUE_TYPE
1604 real_value_from_int_cst (const_tree type, const_tree i)
1605 {
1606 REAL_VALUE_TYPE d;
1607
1608 /* Clear all bits of the real value type so that we can later do
1609 bitwise comparisons to see if two values are the same. */
1610 memset (&d, 0, sizeof d);
1611
1612 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
1613 TREE_INT_CST_LOW (i), TREE_INT_CST_HIGH (i),
1614 TYPE_UNSIGNED (TREE_TYPE (i)));
1615 return d;
1616 }
1617
1618 /* Given a tree representing an integer constant I, return a tree
1619 representing the same value as a floating-point constant of type TYPE. */
1620
1621 tree
1622 build_real_from_int_cst (tree type, const_tree i)
1623 {
1624 tree v;
1625 int overflow = TREE_OVERFLOW (i);
1626
1627 v = build_real (type, real_value_from_int_cst (type, i));
1628
1629 TREE_OVERFLOW (v) |= overflow;
1630 return v;
1631 }
1632
1633 /* Return a newly constructed STRING_CST node whose value is
1634 the LEN characters at STR.
1635 Note that for a C string literal, LEN should include the trailing NUL.
1636 The TREE_TYPE is not initialized. */
1637
1638 tree
1639 build_string (int len, const char *str)
1640 {
1641 tree s;
1642 size_t length;
1643
1644 /* Do not waste bytes provided by padding of struct tree_string. */
1645 length = len + offsetof (struct tree_string, str) + 1;
1646
1647 record_node_allocation_statistics (STRING_CST, length);
1648
1649 s = ggc_alloc_tree_node (length);
1650
1651 memset (s, 0, sizeof (struct tree_typed));
1652 TREE_SET_CODE (s, STRING_CST);
1653 TREE_CONSTANT (s) = 1;
1654 TREE_STRING_LENGTH (s) = len;
1655 memcpy (s->string.str, str, len);
1656 s->string.str[len] = '\0';
1657
1658 return s;
1659 }
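/* Example (illustrative only): for the C literal "hi", LEN must count the
   trailing NUL, so the call is

     tree s = build_string (3, "hi");

   and the caller is still responsible for setting TREE_TYPE (s).  */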
1660
1661 /* Return a newly constructed COMPLEX_CST node whose value is
1662 specified by the real and imaginary parts REAL and IMAG.
1663 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1664 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1665
1666 tree
1667 build_complex (tree type, tree real, tree imag)
1668 {
1669 tree t = make_node (COMPLEX_CST);
1670
1671 TREE_REALPART (t) = real;
1672 TREE_IMAGPART (t) = imag;
1673 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1674 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1675 return t;
1676 }
1677
1678 /* Return a constant of arithmetic type TYPE which is the
1679 multiplicative identity of the set TYPE. */
1680
1681 tree
1682 build_one_cst (tree type)
1683 {
1684 switch (TREE_CODE (type))
1685 {
1686 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1687 case POINTER_TYPE: case REFERENCE_TYPE:
1688 case OFFSET_TYPE:
1689 return build_int_cst (type, 1);
1690
1691 case REAL_TYPE:
1692 return build_real (type, dconst1);
1693
1694 case FIXED_POINT_TYPE:
1695 /* We can only generate 1 for accum types. */
1696 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1697 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1698
1699 case VECTOR_TYPE:
1700 {
1701 tree scalar = build_one_cst (TREE_TYPE (type));
1702
1703 return build_vector_from_val (type, scalar);
1704 }
1705
1706 case COMPLEX_TYPE:
1707 return build_complex (type,
1708 build_one_cst (TREE_TYPE (type)),
1709 build_zero_cst (TREE_TYPE (type)));
1710
1711 default:
1712 gcc_unreachable ();
1713 }
1714 }
1715
1716 /* Return an integer of type TYPE containing all 1's in as much precision as
1717 it contains, or a complex or vector whose subparts are such integers. */
1718
1719 tree
1720 build_all_ones_cst (tree type)
1721 {
1722 if (TREE_CODE (type) == COMPLEX_TYPE)
1723 {
1724 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1725 return build_complex (type, scalar, scalar);
1726 }
1727 else
1728 return build_minus_one_cst (type);
1729 }
1730
1731 /* Return a constant of arithmetic type TYPE which is the
1732 opposite of the multiplicative identity of the set TYPE. */
1733
1734 tree
1735 build_minus_one_cst (tree type)
1736 {
1737 switch (TREE_CODE (type))
1738 {
1739 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1740 case POINTER_TYPE: case REFERENCE_TYPE:
1741 case OFFSET_TYPE:
1742 return build_int_cst (type, -1);
1743
1744 case REAL_TYPE:
1745 return build_real (type, dconstm1);
1746
1747 case FIXED_POINT_TYPE:
1748 /* We can only generate 1 for accum types. */
1749 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1750 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1751 TYPE_MODE (type)));
1752
1753 case VECTOR_TYPE:
1754 {
1755 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1756
1757 return build_vector_from_val (type, scalar);
1758 }
1759
1760 case COMPLEX_TYPE:
1761 return build_complex (type,
1762 build_minus_one_cst (TREE_TYPE (type)),
1763 build_zero_cst (TREE_TYPE (type)));
1764
1765 default:
1766 gcc_unreachable ();
1767 }
1768 }
1769
1770 /* Build 0 constant of type TYPE. This is used by constructor folding
1771 and thus the constant should be represented in memory by
1772 zero(es). */
1773
1774 tree
1775 build_zero_cst (tree type)
1776 {
1777 switch (TREE_CODE (type))
1778 {
1779 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1780 case POINTER_TYPE: case REFERENCE_TYPE:
1781 case OFFSET_TYPE: case NULLPTR_TYPE:
1782 return build_int_cst (type, 0);
1783
1784 case REAL_TYPE:
1785 return build_real (type, dconst0);
1786
1787 case FIXED_POINT_TYPE:
1788 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1789
1790 case VECTOR_TYPE:
1791 {
1792 tree scalar = build_zero_cst (TREE_TYPE (type));
1793
1794 return build_vector_from_val (type, scalar);
1795 }
1796
1797 case COMPLEX_TYPE:
1798 {
1799 tree zero = build_zero_cst (TREE_TYPE (type));
1800
1801 return build_complex (type, zero, zero);
1802 }
1803
1804 default:
1805 if (!AGGREGATE_TYPE_P (type))
1806 return fold_convert (type, integer_zero_node);
1807 return build_constructor (type, NULL);
1808 }
1809 }
1810
1811
1812 /* Build a BINFO with LEN language slots. */
1813
1814 tree
1815 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
1816 {
1817 tree t;
1818 size_t length = (offsetof (struct tree_binfo, base_binfos)
1819 + vec<tree, va_gc>::embedded_size (base_binfos));
1820
1821 record_node_allocation_statistics (TREE_BINFO, length);
1822
1823 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1824
1825 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
1826
1827 TREE_SET_CODE (t, TREE_BINFO);
1828
1829 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
1830
1831 return t;
1832 }
1833
1834 /* Create a CASE_LABEL_EXPR tree node and return it. */
1835
1836 tree
1837 build_case_label (tree low_value, tree high_value, tree label_decl)
1838 {
1839 tree t = make_node (CASE_LABEL_EXPR);
1840
1841 TREE_TYPE (t) = void_type_node;
1842 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
1843
1844 CASE_LOW (t) = low_value;
1845 CASE_HIGH (t) = high_value;
1846 CASE_LABEL (t) = label_decl;
1847 CASE_CHAIN (t) = NULL_TREE;
1848
1849 return t;
1850 }
1851
1852 /* Build a newly constructed TREE_VEC node of length LEN. */
1853
1854 tree
1855 make_tree_vec_stat (int len MEM_STAT_DECL)
1856 {
1857 tree t;
1858 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1859
1860 record_node_allocation_statistics (TREE_VEC, length);
1861
1862 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1863
1864 TREE_SET_CODE (t, TREE_VEC);
1865 TREE_VEC_LENGTH (t) = len;
1866
1867 return t;
1868 }
1869
1870 /* Grow a TREE_VEC node to new length LEN. */
1871
1872 tree
1873 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
1874 {
1875 gcc_assert (TREE_CODE (v) == TREE_VEC);
1876
1877 int oldlen = TREE_VEC_LENGTH (v);
1878 gcc_assert (len > oldlen);
1879
1880 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
1881 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1882
1883 record_node_allocation_statistics (TREE_VEC, length - oldlength);
1884
1885 v = (tree) ggc_realloc_stat (v, length PASS_MEM_STAT);
1886
1887 TREE_VEC_LENGTH (v) = len;
1888
1889 return v;
1890 }
1891 \f
1892 /* Return 1 if EXPR is the integer constant zero or a complex constant
1893 of zero. */
1894
1895 int
1896 integer_zerop (const_tree expr)
1897 {
1898 STRIP_NOPS (expr);
1899
1900 switch (TREE_CODE (expr))
1901 {
1902 case INTEGER_CST:
1903 return (TREE_INT_CST_LOW (expr) == 0
1904 && TREE_INT_CST_HIGH (expr) == 0);
1905 case COMPLEX_CST:
1906 return (integer_zerop (TREE_REALPART (expr))
1907 && integer_zerop (TREE_IMAGPART (expr)));
1908 case VECTOR_CST:
1909 {
1910 unsigned i;
1911 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
1912 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
1913 return false;
1914 return true;
1915 }
1916 default:
1917 return false;
1918 }
1919 }
1920
1921 /* Return 1 if EXPR is the integer constant one or the corresponding
1922 complex constant. */
1923
1924 int
1925 integer_onep (const_tree expr)
1926 {
1927 STRIP_NOPS (expr);
1928
1929 switch (TREE_CODE (expr))
1930 {
1931 case INTEGER_CST:
1932 return (TREE_INT_CST_LOW (expr) == 1
1933 && TREE_INT_CST_HIGH (expr) == 0);
1934 case COMPLEX_CST:
1935 return (integer_onep (TREE_REALPART (expr))
1936 && integer_zerop (TREE_IMAGPART (expr)));
1937 case VECTOR_CST:
1938 {
1939 unsigned i;
1940 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
1941 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
1942 return false;
1943 return true;
1944 }
1945 default:
1946 return false;
1947 }
1948 }
1949
1950 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
1951 it contains, or a complex or vector whose subparts are such integers. */
1952
1953 int
1954 integer_all_onesp (const_tree expr)
1955 {
1956 int prec;
1957 int uns;
1958
1959 STRIP_NOPS (expr);
1960
1961 if (TREE_CODE (expr) == COMPLEX_CST
1962 && integer_all_onesp (TREE_REALPART (expr))
1963 && integer_all_onesp (TREE_IMAGPART (expr)))
1964 return 1;
1965
1966 else if (TREE_CODE (expr) == VECTOR_CST)
1967 {
1968 unsigned i;
1969 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
1970 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
1971 return 0;
1972 return 1;
1973 }
1974
1975 else if (TREE_CODE (expr) != INTEGER_CST)
1976 return 0;
1977
1978 uns = TYPE_UNSIGNED (TREE_TYPE (expr));
1979 if (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
1980 && TREE_INT_CST_HIGH (expr) == -1)
1981 return 1;
1982 if (!uns)
1983 return 0;
1984
1985 prec = TYPE_PRECISION (TREE_TYPE (expr));
1986 if (prec >= HOST_BITS_PER_WIDE_INT)
1987 {
1988 HOST_WIDE_INT high_value;
1989 int shift_amount;
1990
1991 shift_amount = prec - HOST_BITS_PER_WIDE_INT;
1992
1993 /* Can not handle precisions greater than twice the host int size. */
1994 gcc_assert (shift_amount <= HOST_BITS_PER_WIDE_INT);
1995 if (shift_amount == HOST_BITS_PER_WIDE_INT)
1996 /* Shifting by the host word size is undefined according to the ANSI
1997 standard, so we must handle this as a special case. */
1998 high_value = -1;
1999 else
2000 high_value = ((HOST_WIDE_INT) 1 << shift_amount) - 1;
2001
2002 return (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
2003 && TREE_INT_CST_HIGH (expr) == high_value);
2004 }
2005 else
2006 return TREE_INT_CST_LOW (expr) == ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
2007 }
2008
2009 /* Return 1 if EXPR is the integer constant minus one. */
2010
2011 int
2012 integer_minus_onep (const_tree expr)
2013 {
2014 STRIP_NOPS (expr);
2015
2016 if (TREE_CODE (expr) == COMPLEX_CST)
2017 return (integer_all_onesp (TREE_REALPART (expr))
2018 && integer_zerop (TREE_IMAGPART (expr)));
2019 else
2020 return integer_all_onesp (expr);
2021 }
2022
2023 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2024 one bit on). */
2025
2026 int
2027 integer_pow2p (const_tree expr)
2028 {
2029 int prec;
2030 unsigned HOST_WIDE_INT high, low;
2031
2032 STRIP_NOPS (expr);
2033
2034 if (TREE_CODE (expr) == COMPLEX_CST
2035 && integer_pow2p (TREE_REALPART (expr))
2036 && integer_zerop (TREE_IMAGPART (expr)))
2037 return 1;
2038
2039 if (TREE_CODE (expr) != INTEGER_CST)
2040 return 0;
2041
2042 prec = TYPE_PRECISION (TREE_TYPE (expr));
2043 high = TREE_INT_CST_HIGH (expr);
2044 low = TREE_INT_CST_LOW (expr);
2045
2046 /* First clear all bits that are beyond the type's precision in case
2047 we've been sign extended. */
2048
2049 if (prec == HOST_BITS_PER_DOUBLE_INT)
2050 ;
2051 else if (prec > HOST_BITS_PER_WIDE_INT)
2052 high &= ~(HOST_WIDE_INT_M1U << (prec - HOST_BITS_PER_WIDE_INT));
2053 else
2054 {
2055 high = 0;
2056 if (prec < HOST_BITS_PER_WIDE_INT)
2057 low &= ~(HOST_WIDE_INT_M1U << prec);
2058 }
2059
2060 if (high == 0 && low == 0)
2061 return 0;
2062
2063 return ((high == 0 && (low & (low - 1)) == 0)
2064 || (low == 0 && (high & (high - 1)) == 0));
2065 }
2066
2067 /* Return 1 if EXPR is an integer constant other than zero or a
2068 complex constant other than zero. */
2069
2070 int
2071 integer_nonzerop (const_tree expr)
2072 {
2073 STRIP_NOPS (expr);
2074
2075 return ((TREE_CODE (expr) == INTEGER_CST
2076 && (TREE_INT_CST_LOW (expr) != 0
2077 || TREE_INT_CST_HIGH (expr) != 0))
2078 || (TREE_CODE (expr) == COMPLEX_CST
2079 && (integer_nonzerop (TREE_REALPART (expr))
2080 || integer_nonzerop (TREE_IMAGPART (expr)))));
2081 }
2082
2083 /* Return 1 if EXPR is the fixed-point constant zero. */
2084
2085 int
2086 fixed_zerop (const_tree expr)
2087 {
2088 return (TREE_CODE (expr) == FIXED_CST
2089 && TREE_FIXED_CST (expr).data.is_zero ());
2090 }
2091
2092 /* Return the base-2 logarithm of EXPR, which is known to be a
2093    power of two.  */
2094
2095 int
2096 tree_log2 (const_tree expr)
2097 {
2098 int prec;
2099 HOST_WIDE_INT high, low;
2100
2101 STRIP_NOPS (expr);
2102
2103 if (TREE_CODE (expr) == COMPLEX_CST)
2104 return tree_log2 (TREE_REALPART (expr));
2105
2106 prec = TYPE_PRECISION (TREE_TYPE (expr));
2107 high = TREE_INT_CST_HIGH (expr);
2108 low = TREE_INT_CST_LOW (expr);
2109
2110 /* First clear all bits that are beyond the type's precision in case
2111 we've been sign extended. */
2112
2113 if (prec == HOST_BITS_PER_DOUBLE_INT)
2114 ;
2115 else if (prec > HOST_BITS_PER_WIDE_INT)
2116 high &= ~(HOST_WIDE_INT_M1U << (prec - HOST_BITS_PER_WIDE_INT));
2117 else
2118 {
2119 high = 0;
2120 if (prec < HOST_BITS_PER_WIDE_INT)
2121 low &= ~(HOST_WIDE_INT_M1U << prec);
2122 }
2123
2124 return (high != 0 ? HOST_BITS_PER_WIDE_INT + exact_log2 (high)
2125 : exact_log2 (low));
2126 }
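
/* Illustrative sketch (hypothetical caller, not code from this file):
   turning an unsigned division by a power-of-two constant into a shift
   using integer_pow2p and tree_log2.

       if (TREE_CODE (divisor) == INTEGER_CST && integer_pow2p (divisor))
         {
           tree shift = build_int_cst (integer_type_node,
                                       tree_log2 (divisor));
           result = fold_build2 (RSHIFT_EXPR, type, dividend, shift);
         }

   A sketch only; a real caller would also have to honor signedness and
   rounding of the division.  */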
2127
2128 /* Similar, but return the largest integer Y such that 2 ** Y is less
2129 than or equal to EXPR. */
2130
2131 int
2132 tree_floor_log2 (const_tree expr)
2133 {
2134 int prec;
2135 HOST_WIDE_INT high, low;
2136
2137 STRIP_NOPS (expr);
2138
2139 if (TREE_CODE (expr) == COMPLEX_CST)
2140 return tree_log2 (TREE_REALPART (expr));
2141
2142 prec = TYPE_PRECISION (TREE_TYPE (expr));
2143 high = TREE_INT_CST_HIGH (expr);
2144 low = TREE_INT_CST_LOW (expr);
2145
2146 /* First clear all bits that are beyond the type's precision in case
2147 we've been sign extended. Ignore if type's precision hasn't been set
2148 since what we are doing is setting it. */
2149
2150 if (prec == HOST_BITS_PER_DOUBLE_INT || prec == 0)
2151 ;
2152 else if (prec > HOST_BITS_PER_WIDE_INT)
2153 high &= ~(HOST_WIDE_INT_M1U << (prec - HOST_BITS_PER_WIDE_INT));
2154 else
2155 {
2156 high = 0;
2157 if (prec < HOST_BITS_PER_WIDE_INT)
2158 low &= ~(HOST_WIDE_INT_M1U << prec);
2159 }
2160
2161 return (high != 0 ? HOST_BITS_PER_WIDE_INT + floor_log2 (high)
2162 : floor_log2 (low));
2163 }
2164
2165 /* Return number of known trailing zero bits in EXPR, or, if the value of
2166    EXPR is known to be zero, the precision of its type.  */
2167
2168 unsigned int
2169 tree_ctz (const_tree expr)
2170 {
2171 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2172 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2173 return 0;
2174
2175 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2176 switch (TREE_CODE (expr))
2177 {
2178 case INTEGER_CST:
2179 ret1 = tree_to_double_int (expr).trailing_zeros ();
2180 return MIN (ret1, prec);
2181 case SSA_NAME:
2182 ret1 = get_nonzero_bits (expr).trailing_zeros ();
2183 return MIN (ret1, prec);
2184 case PLUS_EXPR:
2185 case MINUS_EXPR:
2186 case BIT_IOR_EXPR:
2187 case BIT_XOR_EXPR:
2188 case MIN_EXPR:
2189 case MAX_EXPR:
2190 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2191 if (ret1 == 0)
2192 return ret1;
2193 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2194 return MIN (ret1, ret2);
2195 case POINTER_PLUS_EXPR:
2196 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2197 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2198       /* Second operand is sizetype, which could in theory be
2199          wider than the pointer's precision.  Make sure we never
2200 return more than prec. */
2201 ret2 = MIN (ret2, prec);
2202 return MIN (ret1, ret2);
2203 case BIT_AND_EXPR:
2204 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2205 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2206 return MAX (ret1, ret2);
2207 case MULT_EXPR:
2208 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2209 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2210 return MIN (ret1 + ret2, prec);
2211 case LSHIFT_EXPR:
2212 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2213 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2214 && ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
2215 < (unsigned HOST_WIDE_INT) prec))
2216 {
2217 ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
2218 return MIN (ret1 + ret2, prec);
2219 }
2220 return ret1;
2221 case RSHIFT_EXPR:
2222 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2223 && ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
2224 < (unsigned HOST_WIDE_INT) prec))
2225 {
2226 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2227 ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
2228 if (ret1 > ret2)
2229 return ret1 - ret2;
2230 }
2231 return 0;
2232 case TRUNC_DIV_EXPR:
2233 case CEIL_DIV_EXPR:
2234 case FLOOR_DIV_EXPR:
2235 case ROUND_DIV_EXPR:
2236 case EXACT_DIV_EXPR:
2237 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2238 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2239 {
2240 int l = tree_log2 (TREE_OPERAND (expr, 1));
2241 if (l >= 0)
2242 {
2243 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2244 ret2 = l;
2245 if (ret1 > ret2)
2246 return ret1 - ret2;
2247 }
2248 }
2249 return 0;
2250 CASE_CONVERT:
2251 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2252 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2253 ret1 = prec;
2254 return MIN (ret1, prec);
2255 case SAVE_EXPR:
2256 return tree_ctz (TREE_OPERAND (expr, 0));
2257 case COND_EXPR:
2258 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2259 if (ret1 == 0)
2260 return 0;
2261 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2262 return MIN (ret1, ret2);
2263 case COMPOUND_EXPR:
2264 return tree_ctz (TREE_OPERAND (expr, 1));
2265 case ADDR_EXPR:
2266 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2267 if (ret1 > BITS_PER_UNIT)
2268 {
2269 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2270 return MIN (ret1, prec);
2271 }
2272 return 0;
2273 default:
2274 return 0;
2275 }
2276 }
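
/* Illustrative sketch: a hypothetical caller that needs EXPR to be a
   multiple of 8 can test the provable trailing zero bits, e.g.

       if (tree_ctz (expr) >= 3)
         ... EXPR is known to be divisible by 8 ...

   tree_ctz only reports what can be proven; 0 means nothing is known.  */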
2277
2278 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2279 decimal float constants, so don't return 1 for them. */
2280
2281 int
2282 real_zerop (const_tree expr)
2283 {
2284 STRIP_NOPS (expr);
2285
2286 switch (TREE_CODE (expr))
2287 {
2288 case REAL_CST:
2289 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2290 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2291 case COMPLEX_CST:
2292 return real_zerop (TREE_REALPART (expr))
2293 && real_zerop (TREE_IMAGPART (expr));
2294 case VECTOR_CST:
2295 {
2296 unsigned i;
2297 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2298 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2299 return false;
2300 return true;
2301 }
2302 default:
2303 return false;
2304 }
2305 }
2306
2307 /* Return 1 if EXPR is the real constant one in real or complex form.
2308 Trailing zeroes matter for decimal float constants, so don't return
2309 1 for them. */
2310
2311 int
2312 real_onep (const_tree expr)
2313 {
2314 STRIP_NOPS (expr);
2315
2316 switch (TREE_CODE (expr))
2317 {
2318 case REAL_CST:
2319 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2320 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2321 case COMPLEX_CST:
2322 return real_onep (TREE_REALPART (expr))
2323 && real_zerop (TREE_IMAGPART (expr));
2324 case VECTOR_CST:
2325 {
2326 unsigned i;
2327 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2328 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2329 return false;
2330 return true;
2331 }
2332 default:
2333 return false;
2334 }
2335 }
2336
2337 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2338 matter for decimal float constants, so don't return 1 for them. */
2339
2340 int
2341 real_minus_onep (const_tree expr)
2342 {
2343 STRIP_NOPS (expr);
2344
2345 switch (TREE_CODE (expr))
2346 {
2347 case REAL_CST:
2348 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2349 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2350 case COMPLEX_CST:
2351 return real_minus_onep (TREE_REALPART (expr))
2352 && real_zerop (TREE_IMAGPART (expr));
2353 case VECTOR_CST:
2354 {
2355 unsigned i;
2356 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2357 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2358 return false;
2359 return true;
2360 }
2361 default:
2362 return false;
2363 }
2364 }
2365
2366 /* Nonzero if EXP is a constant or a cast of a constant. */
2367
2368 int
2369 really_constant_p (const_tree exp)
2370 {
2371 /* This is not quite the same as STRIP_NOPS. It does more. */
2372 while (CONVERT_EXPR_P (exp)
2373 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2374 exp = TREE_OPERAND (exp, 0);
2375 return TREE_CONSTANT (exp);
2376 }
2377 \f
2378 /* Return first list element whose TREE_VALUE is ELEM.
2379 Return 0 if ELEM is not in LIST. */
2380
2381 tree
2382 value_member (tree elem, tree list)
2383 {
2384 while (list)
2385 {
2386 if (elem == TREE_VALUE (list))
2387 return list;
2388 list = TREE_CHAIN (list);
2389 }
2390 return NULL_TREE;
2391 }
2392
2393 /* Return first list element whose TREE_PURPOSE is ELEM.
2394 Return 0 if ELEM is not in LIST. */
2395
2396 tree
2397 purpose_member (const_tree elem, tree list)
2398 {
2399 while (list)
2400 {
2401 if (elem == TREE_PURPOSE (list))
2402 return list;
2403 list = TREE_CHAIN (list);
2404 }
2405 return NULL_TREE;
2406 }
2407
2408 /* Return true if ELEM is in V. */
2409
2410 bool
2411 vec_member (const_tree elem, vec<tree, va_gc> *v)
2412 {
2413 unsigned ix;
2414 tree t;
2415 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2416 if (elem == t)
2417 return true;
2418 return false;
2419 }
2420
2421 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2422 NULL_TREE. */
2423
2424 tree
2425 chain_index (int idx, tree chain)
2426 {
2427 for (; chain && idx > 0; --idx)
2428 chain = TREE_CHAIN (chain);
2429 return chain;
2430 }
2431
2432 /* Return nonzero if ELEM is part of the chain CHAIN. */
2433
2434 int
2435 chain_member (const_tree elem, const_tree chain)
2436 {
2437 while (chain)
2438 {
2439 if (elem == chain)
2440 return 1;
2441 chain = DECL_CHAIN (chain);
2442 }
2443
2444 return 0;
2445 }
2446
2447 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2448 We expect a null pointer to mark the end of the chain.
2449 This is the Lisp primitive `length'. */
2450
2451 int
2452 list_length (const_tree t)
2453 {
2454 const_tree p = t;
2455 #ifdef ENABLE_TREE_CHECKING
2456 const_tree q = t;
2457 #endif
2458 int len = 0;
2459
2460 while (p)
2461 {
2462 p = TREE_CHAIN (p);
2463 #ifdef ENABLE_TREE_CHECKING
2464 if (len % 2)
2465 q = TREE_CHAIN (q);
2466 gcc_assert (p != q);
2467 #endif
2468 len++;
2469 }
2470
2471 return len;
2472 }
2473
2474 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2475 UNION_TYPE TYPE, or NULL_TREE if none. */
2476
2477 tree
2478 first_field (const_tree type)
2479 {
2480 tree t = TYPE_FIELDS (type);
2481 while (t && TREE_CODE (t) != FIELD_DECL)
2482 t = TREE_CHAIN (t);
2483 return t;
2484 }
2485
2486 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2487 by modifying the last node in chain 1 to point to chain 2.
2488 This is the Lisp primitive `nconc'. */
2489
2490 tree
2491 chainon (tree op1, tree op2)
2492 {
2493 tree t1;
2494
2495 if (!op1)
2496 return op2;
2497 if (!op2)
2498 return op1;
2499
2500 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2501 continue;
2502 TREE_CHAIN (t1) = op2;
2503
2504 #ifdef ENABLE_TREE_CHECKING
2505 {
2506 tree t2;
2507 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2508 gcc_assert (t2 != t1);
2509 }
2510 #endif
2511
2512 return op1;
2513 }
2514
2515 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2516
2517 tree
2518 tree_last (tree chain)
2519 {
2520 tree next;
2521 if (chain)
2522 while ((next = TREE_CHAIN (chain)))
2523 chain = next;
2524 return chain;
2525 }
2526
2527 /* Reverse the order of elements in the chain T,
2528 and return the new head of the chain (old last element). */
2529
2530 tree
2531 nreverse (tree t)
2532 {
2533 tree prev = 0, decl, next;
2534 for (decl = t; decl; decl = next)
2535 {
2536 /* We shouldn't be using this function to reverse BLOCK chains; we
2537 have blocks_nreverse for that. */
2538 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2539 next = TREE_CHAIN (decl);
2540 TREE_CHAIN (decl) = prev;
2541 prev = decl;
2542 }
2543 return prev;
2544 }
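
/* Illustrative sketch of the chain helpers above, with hypothetical
   nodes a, b and c already chained as a -> b -> c:

       gcc_assert (list_length (a) == 3);
       gcc_assert (tree_last (a) == c);
       a = nreverse (a);                     ... now c -> b -> a ...
       gcc_assert (TREE_CHAIN (c) == b);
*/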
2545 \f
2546 /* Return a newly created TREE_LIST node whose
2547 purpose and value fields are PARM and VALUE. */
2548
2549 tree
2550 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2551 {
2552 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2553 TREE_PURPOSE (t) = parm;
2554 TREE_VALUE (t) = value;
2555 return t;
2556 }
2557
2558 /* Build a chain of TREE_LIST nodes from a vector. */
2559
2560 tree
2561 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2562 {
2563 tree ret = NULL_TREE;
2564 tree *pp = &ret;
2565 unsigned int i;
2566 tree t;
2567 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2568 {
2569 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2570 pp = &TREE_CHAIN (*pp);
2571 }
2572 return ret;
2573 }
2574
2575 /* Return a newly created TREE_LIST node whose
2576 purpose and value fields are PURPOSE and VALUE
2577 and whose TREE_CHAIN is CHAIN. */
2578
2579 tree
2580 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2581 {
2582 tree node;
2583
2584 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2585 memset (node, 0, sizeof (struct tree_common));
2586
2587 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2588
2589 TREE_SET_CODE (node, TREE_LIST);
2590 TREE_CHAIN (node) = chain;
2591 TREE_PURPOSE (node) = purpose;
2592 TREE_VALUE (node) = value;
2593 return node;
2594 }
2595
2596 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2597 trees. */
2598
2599 vec<tree, va_gc> *
2600 ctor_to_vec (tree ctor)
2601 {
2602 vec<tree, va_gc> *vec;
2603 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2604 unsigned int ix;
2605 tree val;
2606
2607 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2608 vec->quick_push (val);
2609
2610 return vec;
2611 }
2612 \f
2613 /* Return the size nominally occupied by an object of type TYPE
2614 when it resides in memory. The value is measured in units of bytes,
2615 and its data type is that normally used for type sizes
2616 (which is the first type created by make_signed_type or
2617 make_unsigned_type). */
2618
2619 tree
2620 size_in_bytes (const_tree type)
2621 {
2622 tree t;
2623
2624 if (type == error_mark_node)
2625 return integer_zero_node;
2626
2627 type = TYPE_MAIN_VARIANT (type);
2628 t = TYPE_SIZE_UNIT (type);
2629
2630 if (t == 0)
2631 {
2632 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2633 return size_zero_node;
2634 }
2635
2636 return t;
2637 }
2638
2639 /* Return the size of TYPE (in bytes) as a wide integer
2640 or return -1 if the size can vary or is larger than an integer. */
2641
2642 HOST_WIDE_INT
2643 int_size_in_bytes (const_tree type)
2644 {
2645 tree t;
2646
2647 if (type == error_mark_node)
2648 return 0;
2649
2650 type = TYPE_MAIN_VARIANT (type);
2651 t = TYPE_SIZE_UNIT (type);
2652 if (t == 0
2653 || TREE_CODE (t) != INTEGER_CST
2654 || TREE_INT_CST_HIGH (t) != 0
2655 /* If the result would appear negative, it's too big to represent. */
2656 || (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
2657 return -1;
2658
2659 return TREE_INT_CST_LOW (t);
2660 }
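
/* Illustrative sketch (assumed caller): deciding whether a type is small
   enough to be copied in registers.

       HOST_WIDE_INT size = int_size_in_bytes (type);
       bool small = size != -1 && size <= 2 * UNITS_PER_WORD;

   The -1 check matters: variable-sized and oversized types report -1.  */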
2661
2662 /* Return the maximum size of TYPE (in bytes) as a wide integer
2663 or return -1 if the size can vary or is larger than an integer. */
2664
2665 HOST_WIDE_INT
2666 max_int_size_in_bytes (const_tree type)
2667 {
2668 HOST_WIDE_INT size = -1;
2669 tree size_tree;
2670
2671 /* If this is an array type, check for a possible MAX_SIZE attached. */
2672
2673 if (TREE_CODE (type) == ARRAY_TYPE)
2674 {
2675 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2676
2677 if (size_tree && tree_fits_uhwi_p (size_tree))
2678 size = tree_low_cst (size_tree, 1);
2679 }
2680
2681 /* If we still haven't been able to get a size, see if the language
2682 can compute a maximum size. */
2683
2684 if (size == -1)
2685 {
2686 size_tree = lang_hooks.types.max_size (type);
2687
2688 if (size_tree && tree_fits_uhwi_p (size_tree))
2689 size = tree_low_cst (size_tree, 1);
2690 }
2691
2692 return size;
2693 }
2694 \f
2695 /* Return the bit position of FIELD, in bits from the start of the record.
2696 This is a tree of type bitsizetype. */
2697
2698 tree
2699 bit_position (const_tree field)
2700 {
2701 return bit_from_pos (DECL_FIELD_OFFSET (field),
2702 DECL_FIELD_BIT_OFFSET (field));
2703 }
2704
2705 /* Likewise, but return as an integer. It must be representable in
2706 that way (since it could be a signed value, we don't have the
2707    option of returning -1 like int_size_in_bytes can).  */
2708
2709 HOST_WIDE_INT
2710 int_bit_position (const_tree field)
2711 {
2712 return tree_low_cst (bit_position (field), 0);
2713 }
2714 \f
2715 /* Return the byte position of FIELD, in bytes from the start of the record.
2716 This is a tree of type sizetype. */
2717
2718 tree
2719 byte_position (const_tree field)
2720 {
2721 return byte_from_pos (DECL_FIELD_OFFSET (field),
2722 DECL_FIELD_BIT_OFFSET (field));
2723 }
2724
2725 /* Likewise, but return as an integer. It must be representable in
2726 that way (since it could be a signed value, we don't have the
2727    option of returning -1 like int_size_in_bytes can).  */
2728
2729 HOST_WIDE_INT
2730 int_byte_position (const_tree field)
2731 {
2732 return tree_low_cst (byte_position (field), 0);
2733 }
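
/* For a FIELD_DECL that starts on a byte boundary the two views agree:
   int_bit_position (field) == int_byte_position (field) * BITS_PER_UNIT.  */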
2734 \f
2735 /* Return the strictest alignment, in bits, that T is known to have. */
2736
2737 unsigned int
2738 expr_align (const_tree t)
2739 {
2740 unsigned int align0, align1;
2741
2742 switch (TREE_CODE (t))
2743 {
2744 CASE_CONVERT: case NON_LVALUE_EXPR:
2745 /* If we have conversions, we know that the alignment of the
2746 object must meet each of the alignments of the types. */
2747 align0 = expr_align (TREE_OPERAND (t, 0));
2748 align1 = TYPE_ALIGN (TREE_TYPE (t));
2749 return MAX (align0, align1);
2750
2751 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2752 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2753 case CLEANUP_POINT_EXPR:
2754 /* These don't change the alignment of an object. */
2755 return expr_align (TREE_OPERAND (t, 0));
2756
2757 case COND_EXPR:
2758 /* The best we can do is say that the alignment is the least aligned
2759 of the two arms. */
2760 align0 = expr_align (TREE_OPERAND (t, 1));
2761 align1 = expr_align (TREE_OPERAND (t, 2));
2762 return MIN (align0, align1);
2763
2764 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2765 meaningfully, it's always 1. */
2766 case LABEL_DECL: case CONST_DECL:
2767 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2768 case FUNCTION_DECL:
2769 gcc_assert (DECL_ALIGN (t) != 0);
2770 return DECL_ALIGN (t);
2771
2772 default:
2773 break;
2774 }
2775
2776 /* Otherwise take the alignment from that of the type. */
2777 return TYPE_ALIGN (TREE_TYPE (t));
2778 }
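
/* Illustrative sketch (hypothetical use): a caller that wants to emit a
   word-sized access only when T is provably aligned enough might write

       if (expr_align (t) >= BITS_PER_WORD)
         ...

   where BITS_PER_WORD is the target word size in bits.  */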
2779 \f
2780 /* Return, as a tree node, the number of elements for TYPE (which is an
2781 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2782
2783 tree
2784 array_type_nelts (const_tree type)
2785 {
2786 tree index_type, min, max;
2787
2788 /* If they did it with unspecified bounds, then we should have already
2789 given an error about it before we got here. */
2790 if (! TYPE_DOMAIN (type))
2791 return error_mark_node;
2792
2793 index_type = TYPE_DOMAIN (type);
2794 min = TYPE_MIN_VALUE (index_type);
2795 max = TYPE_MAX_VALUE (index_type);
2796
2797 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2798 if (!max)
2799 return error_mark_node;
2800
2801 return (integer_zerop (min)
2802 ? max
2803 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2804 }
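
/* Illustrative sketch: for "int a[10]" the function above yields the
   constant 9, so an assumed caller computing the element count would do

       tree nelts = array_type_nelts (atype);
       tree count = fold_build2 (PLUS_EXPR, TREE_TYPE (nelts), nelts,
                                 build_int_cst (TREE_TYPE (nelts), 1));
*/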
2805 \f
2806 /* If arg is static -- a reference to an object in static storage -- then
2807 return the object. This is not the same as the C meaning of `static'.
2808 If arg isn't static, return NULL. */
2809
2810 tree
2811 staticp (tree arg)
2812 {
2813 switch (TREE_CODE (arg))
2814 {
2815 case FUNCTION_DECL:
2816 /* Nested functions are static, even though taking their address will
2817 involve a trampoline as we unnest the nested function and create
2818 the trampoline on the tree level. */
2819 return arg;
2820
2821 case VAR_DECL:
2822 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2823 && ! DECL_THREAD_LOCAL_P (arg)
2824 && ! DECL_DLLIMPORT_P (arg)
2825 ? arg : NULL);
2826
2827 case CONST_DECL:
2828 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2829 ? arg : NULL);
2830
2831 case CONSTRUCTOR:
2832 return TREE_STATIC (arg) ? arg : NULL;
2833
2834 case LABEL_DECL:
2835 case STRING_CST:
2836 return arg;
2837
2838 case COMPONENT_REF:
2839 /* If the thing being referenced is not a field, then it is
2840 something language specific. */
2841 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2842
2843 /* If we are referencing a bitfield, we can't evaluate an
2844 ADDR_EXPR at compile time and so it isn't a constant. */
2845 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2846 return NULL;
2847
2848 return staticp (TREE_OPERAND (arg, 0));
2849
2850 case BIT_FIELD_REF:
2851 return NULL;
2852
2853 case INDIRECT_REF:
2854 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2855
2856 case ARRAY_REF:
2857 case ARRAY_RANGE_REF:
2858 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2859 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2860 return staticp (TREE_OPERAND (arg, 0));
2861 else
2862 return NULL;
2863
2864 case COMPOUND_LITERAL_EXPR:
2865 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2866
2867 default:
2868 return NULL;
2869 }
2870 }
2871
2872 \f
2873
2874
2875 /* Return whether OP is a DECL whose address is function-invariant. */
2876
2877 bool
2878 decl_address_invariant_p (const_tree op)
2879 {
2880   /* The conditions below are slightly less strict than the ones in
2881 staticp. */
2882
2883 switch (TREE_CODE (op))
2884 {
2885 case PARM_DECL:
2886 case RESULT_DECL:
2887 case LABEL_DECL:
2888 case FUNCTION_DECL:
2889 return true;
2890
2891 case VAR_DECL:
2892 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2893 || DECL_THREAD_LOCAL_P (op)
2894 || DECL_CONTEXT (op) == current_function_decl
2895 || decl_function_context (op) == current_function_decl)
2896 return true;
2897 break;
2898
2899 case CONST_DECL:
2900 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2901 || decl_function_context (op) == current_function_decl)
2902 return true;
2903 break;
2904
2905 default:
2906 break;
2907 }
2908
2909 return false;
2910 }
2911
2912 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
2913
2914 bool
2915 decl_address_ip_invariant_p (const_tree op)
2916 {
2917   /* The conditions below are slightly less strict than the ones in
2918 staticp. */
2919
2920 switch (TREE_CODE (op))
2921 {
2922 case LABEL_DECL:
2923 case FUNCTION_DECL:
2924 case STRING_CST:
2925 return true;
2926
2927 case VAR_DECL:
2928 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
2929 && !DECL_DLLIMPORT_P (op))
2930 || DECL_THREAD_LOCAL_P (op))
2931 return true;
2932 break;
2933
2934 case CONST_DECL:
2935 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
2936 return true;
2937 break;
2938
2939 default:
2940 break;
2941 }
2942
2943 return false;
2944 }
2945
2946
2947 /* Return true if T is function-invariant (internal function, does
2948 not handle arithmetic; that's handled in skip_simple_arithmetic and
2949 tree_invariant_p). */
2950
2951 static bool tree_invariant_p (tree t);
2952
2953 static bool
2954 tree_invariant_p_1 (tree t)
2955 {
2956 tree op;
2957
2958 if (TREE_CONSTANT (t)
2959 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
2960 return true;
2961
2962 switch (TREE_CODE (t))
2963 {
2964 case SAVE_EXPR:
2965 return true;
2966
2967 case ADDR_EXPR:
2968 op = TREE_OPERAND (t, 0);
2969 while (handled_component_p (op))
2970 {
2971 switch (TREE_CODE (op))
2972 {
2973 case ARRAY_REF:
2974 case ARRAY_RANGE_REF:
2975 if (!tree_invariant_p (TREE_OPERAND (op, 1))
2976 || TREE_OPERAND (op, 2) != NULL_TREE
2977 || TREE_OPERAND (op, 3) != NULL_TREE)
2978 return false;
2979 break;
2980
2981 case COMPONENT_REF:
2982 if (TREE_OPERAND (op, 2) != NULL_TREE)
2983 return false;
2984 break;
2985
2986 default:;
2987 }
2988 op = TREE_OPERAND (op, 0);
2989 }
2990
2991 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2992
2993 default:
2994 break;
2995 }
2996
2997 return false;
2998 }
2999
3000 /* Return true if T is function-invariant. */
3001
3002 static bool
3003 tree_invariant_p (tree t)
3004 {
3005 tree inner = skip_simple_arithmetic (t);
3006 return tree_invariant_p_1 (inner);
3007 }
3008
3009 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3010 Do this to any expression which may be used in more than one place,
3011 but must be evaluated only once.
3012
3013 Normally, expand_expr would reevaluate the expression each time.
3014 Calling save_expr produces something that is evaluated and recorded
3015 the first time expand_expr is called on it. Subsequent calls to
3016 expand_expr just reuse the recorded value.
3017
3018 The call to expand_expr that generates code that actually computes
3019 the value is the first call *at compile time*. Subsequent calls
3020 *at compile time* generate code to use the saved value.
3021 This produces correct result provided that *at run time* control
3022 always flows through the insns made by the first expand_expr
3023 before reaching the other places where the save_expr was evaluated.
3024 You, the caller of save_expr, must make sure this is so.
3025
3026 Constants, and certain read-only nodes, are returned with no
3027 SAVE_EXPR because that is safe. Expressions containing placeholders
3028 are not touched; see tree.def for an explanation of what these
3029 are used for. */
3030
3031 tree
3032 save_expr (tree expr)
3033 {
3034 tree t = fold (expr);
3035 tree inner;
3036
3037 /* If the tree evaluates to a constant, then we don't want to hide that
3038 fact (i.e. this allows further folding, and direct checks for constants).
3039 However, a read-only object that has side effects cannot be bypassed.
3040 Since it is no problem to reevaluate literals, we just return the
3041 literal node. */
3042 inner = skip_simple_arithmetic (t);
3043 if (TREE_CODE (inner) == ERROR_MARK)
3044 return inner;
3045
3046 if (tree_invariant_p_1 (inner))
3047 return t;
3048
3049 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3050 it means that the size or offset of some field of an object depends on
3051 the value within another field.
3052
3053 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3054 and some variable since it would then need to be both evaluated once and
3055 evaluated more than once. Front-ends must assure this case cannot
3056 happen by surrounding any such subexpressions in their own SAVE_EXPR
3057 and forcing evaluation at the proper time. */
3058 if (contains_placeholder_p (inner))
3059 return t;
3060
3061 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3062 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3063
3064 /* This expression might be placed ahead of a jump to ensure that the
3065 value was computed on both sides of the jump. So make sure it isn't
3066 eliminated as dead. */
3067 TREE_SIDE_EFFECTS (t) = 1;
3068 return t;
3069 }
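
/* Illustrative sketch (hypothetical front-end code): to use a possibly
   side-effecting operand twice, wrap it once and reuse the wrapper,

       tree saved = save_expr (operand);
       tree twice = fold_build2 (PLUS_EXPR, TREE_TYPE (saved),
                                 saved, saved);

   so the operand's side effects are evaluated only once at run time.  */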
3070
3071 /* Look inside EXPR into any simple arithmetic operations. Return the
3072 outermost non-arithmetic or non-invariant node. */
3073
3074 tree
3075 skip_simple_arithmetic (tree expr)
3076 {
3077 /* We don't care about whether this can be used as an lvalue in this
3078 context. */
3079 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3080 expr = TREE_OPERAND (expr, 0);
3081
3082 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3083 a constant, it will be more efficient to not make another SAVE_EXPR since
3084 it will allow better simplification and GCSE will be able to merge the
3085 computations if they actually occur. */
3086 while (true)
3087 {
3088 if (UNARY_CLASS_P (expr))
3089 expr = TREE_OPERAND (expr, 0);
3090 else if (BINARY_CLASS_P (expr))
3091 {
3092 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3093 expr = TREE_OPERAND (expr, 0);
3094 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3095 expr = TREE_OPERAND (expr, 1);
3096 else
3097 break;
3098 }
3099 else
3100 break;
3101 }
3102
3103 return expr;
3104 }
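
/* Example of the effect above (assuming only X is non-invariant): for the
   expression (X + 4) * 2, skip_simple_arithmetic peels the invariant
   operands and returns X, which save_expr then inspects and wraps.  */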
3105
3106 /* Look inside EXPR into simple arithmetic operations involving constants.
3107 Return the outermost non-arithmetic or non-constant node. */
3108
3109 tree
3110 skip_simple_constant_arithmetic (tree expr)
3111 {
3112 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3113 expr = TREE_OPERAND (expr, 0);
3114
3115 while (true)
3116 {
3117 if (UNARY_CLASS_P (expr))
3118 expr = TREE_OPERAND (expr, 0);
3119 else if (BINARY_CLASS_P (expr))
3120 {
3121 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3122 expr = TREE_OPERAND (expr, 0);
3123 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3124 expr = TREE_OPERAND (expr, 1);
3125 else
3126 break;
3127 }
3128 else
3129 break;
3130 }
3131
3132 return expr;
3133 }
3134
3135 /* Return which tree structure is used by T. */
3136
3137 enum tree_node_structure_enum
3138 tree_node_structure (const_tree t)
3139 {
3140 const enum tree_code code = TREE_CODE (t);
3141 return tree_node_structure_for_code (code);
3142 }
3143
3144 /* Set various status flags when building a CALL_EXPR object T. */
3145
3146 static void
3147 process_call_operands (tree t)
3148 {
3149 bool side_effects = TREE_SIDE_EFFECTS (t);
3150 bool read_only = false;
3151 int i = call_expr_flags (t);
3152
3153 /* Calls have side-effects, except those to const or pure functions. */
3154 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3155 side_effects = true;
3156 /* Propagate TREE_READONLY of arguments for const functions. */
3157 if (i & ECF_CONST)
3158 read_only = true;
3159
3160 if (!side_effects || read_only)
3161 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3162 {
3163 tree op = TREE_OPERAND (t, i);
3164 if (op && TREE_SIDE_EFFECTS (op))
3165 side_effects = true;
3166 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3167 read_only = false;
3168 }
3169
3170 TREE_SIDE_EFFECTS (t) = side_effects;
3171 TREE_READONLY (t) = read_only;
3172 }
3173 \f
3174 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3175 size or offset that depends on a field within a record. */
3176
3177 bool
3178 contains_placeholder_p (const_tree exp)
3179 {
3180 enum tree_code code;
3181
3182 if (!exp)
3183 return 0;
3184
3185 code = TREE_CODE (exp);
3186 if (code == PLACEHOLDER_EXPR)
3187 return 1;
3188
3189 switch (TREE_CODE_CLASS (code))
3190 {
3191 case tcc_reference:
3192 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3193 position computations since they will be converted into a
3194          WITH_RECORD_EXPR involving the reference, which we assume
3195          here will be valid.  */
3196 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3197
3198 case tcc_exceptional:
3199 if (code == TREE_LIST)
3200 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3201 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3202 break;
3203
3204 case tcc_unary:
3205 case tcc_binary:
3206 case tcc_comparison:
3207 case tcc_expression:
3208 switch (code)
3209 {
3210 case COMPOUND_EXPR:
3211 /* Ignoring the first operand isn't quite right, but works best. */
3212 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3213
3214 case COND_EXPR:
3215 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3216 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3217 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3218
3219 case SAVE_EXPR:
3220 /* The save_expr function never wraps anything containing
3221 a PLACEHOLDER_EXPR. */
3222 return 0;
3223
3224 default:
3225 break;
3226 }
3227
3228 switch (TREE_CODE_LENGTH (code))
3229 {
3230 case 1:
3231 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3232 case 2:
3233 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3234 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3235 default:
3236 return 0;
3237 }
3238
3239 case tcc_vl_exp:
3240 switch (code)
3241 {
3242 case CALL_EXPR:
3243 {
3244 const_tree arg;
3245 const_call_expr_arg_iterator iter;
3246 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3247 if (CONTAINS_PLACEHOLDER_P (arg))
3248 return 1;
3249 return 0;
3250 }
3251 default:
3252 return 0;
3253 }
3254
3255 default:
3256 return 0;
3257 }
3258 return 0;
3259 }
3260
3261 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3262 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3263 field positions. */
3264
3265 static bool
3266 type_contains_placeholder_1 (const_tree type)
3267 {
3268   /* If the size contains a placeholder or the parent type (the component
3269      type in the case of arrays) involves a placeholder, this type does.  */
3270 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3271 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3272 || (!POINTER_TYPE_P (type)
3273 && TREE_TYPE (type)
3274 && type_contains_placeholder_p (TREE_TYPE (type))))
3275 return true;
3276
3277 /* Now do type-specific checks. Note that the last part of the check above
3278 greatly limits what we have to do below. */
3279 switch (TREE_CODE (type))
3280 {
3281 case VOID_TYPE:
3282 case POINTER_BOUNDS_TYPE:
3283 case COMPLEX_TYPE:
3284 case ENUMERAL_TYPE:
3285 case BOOLEAN_TYPE:
3286 case POINTER_TYPE:
3287 case OFFSET_TYPE:
3288 case REFERENCE_TYPE:
3289 case METHOD_TYPE:
3290 case FUNCTION_TYPE:
3291 case VECTOR_TYPE:
3292 case NULLPTR_TYPE:
3293 return false;
3294
3295 case INTEGER_TYPE:
3296 case REAL_TYPE:
3297 case FIXED_POINT_TYPE:
3298 /* Here we just check the bounds. */
3299 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3300 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3301
3302 case ARRAY_TYPE:
3303 /* We have already checked the component type above, so just check the
3304 domain type. */
3305 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3306
3307 case RECORD_TYPE:
3308 case UNION_TYPE:
3309 case QUAL_UNION_TYPE:
3310 {
3311 tree field;
3312
3313 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3314 if (TREE_CODE (field) == FIELD_DECL
3315 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3316 || (TREE_CODE (type) == QUAL_UNION_TYPE
3317 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3318 || type_contains_placeholder_p (TREE_TYPE (field))))
3319 return true;
3320
3321 return false;
3322 }
3323
3324 default:
3325 gcc_unreachable ();
3326 }
3327 }
3328
3329 /* Wrapper around above function used to cache its result. */
3330
3331 bool
3332 type_contains_placeholder_p (tree type)
3333 {
3334 bool result;
3335
3336 /* If the contains_placeholder_bits field has been initialized,
3337 then we know the answer. */
3338 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3339 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3340
3341 /* Indicate that we've seen this type node, and the answer is false.
3342 This is what we want to return if we run into recursion via fields. */
3343 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3344
3345 /* Compute the real value. */
3346 result = type_contains_placeholder_1 (type);
3347
3348 /* Store the real value. */
3349 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3350
3351 return result;
3352 }
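
/* The cached value used above encodes the answer as
   0 = not yet computed, 1 = computed and false, 2 = computed and true,
   which is why the reader subtracts 1 and the writer adds 1.  */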
3353 \f
3354 /* Push tree EXP onto vector QUEUE if it is not already present. */
3355
3356 static void
3357 push_without_duplicates (tree exp, vec<tree> *queue)
3358 {
3359 unsigned int i;
3360 tree iter;
3361
3362 FOR_EACH_VEC_ELT (*queue, i, iter)
3363 if (simple_cst_equal (iter, exp) == 1)
3364 break;
3365
3366 if (!iter)
3367 queue->safe_push (exp);
3368 }
3369
3370 /* Given a tree EXP, find all occurrences of references to fields
3371 in a PLACEHOLDER_EXPR and place them in vector REFS without
3372 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3373 we assume here that EXP contains only arithmetic expressions
3374 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3375 argument list. */
3376
3377 void
3378 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3379 {
3380 enum tree_code code = TREE_CODE (exp);
3381 tree inner;
3382 int i;
3383
3384 /* We handle TREE_LIST and COMPONENT_REF separately. */
3385 if (code == TREE_LIST)
3386 {
3387 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3388 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3389 }
3390 else if (code == COMPONENT_REF)
3391 {
3392 for (inner = TREE_OPERAND (exp, 0);
3393 REFERENCE_CLASS_P (inner);
3394 inner = TREE_OPERAND (inner, 0))
3395 ;
3396
3397 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3398 push_without_duplicates (exp, refs);
3399 else
3400 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3401 }
3402 else
3403 switch (TREE_CODE_CLASS (code))
3404 {
3405 case tcc_constant:
3406 break;
3407
3408 case tcc_declaration:
3409 /* Variables allocated to static storage can stay. */
3410 if (!TREE_STATIC (exp))
3411 push_without_duplicates (exp, refs);
3412 break;
3413
3414 case tcc_expression:
3415 /* This is the pattern built in ada/make_aligning_type. */
3416 if (code == ADDR_EXPR
3417 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3418 {
3419 push_without_duplicates (exp, refs);
3420 break;
3421 }
3422
3423 /* Fall through... */
3424
3425 case tcc_exceptional:
3426 case tcc_unary:
3427 case tcc_binary:
3428 case tcc_comparison:
3429 case tcc_reference:
3430 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3431 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3432 break;
3433
3434 case tcc_vl_exp:
3435 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3436 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3437 break;
3438
3439 default:
3440 gcc_unreachable ();
3441 }
3442 }
3443
3444 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3445 return a tree with all occurrences of references to F in a
3446 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3447 CONST_DECLs. Note that we assume here that EXP contains only
3448 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3449 occurring only in their argument list. */
3450
3451 tree
3452 substitute_in_expr (tree exp, tree f, tree r)
3453 {
3454 enum tree_code code = TREE_CODE (exp);
3455 tree op0, op1, op2, op3;
3456 tree new_tree;
3457
3458 /* We handle TREE_LIST and COMPONENT_REF separately. */
3459 if (code == TREE_LIST)
3460 {
3461 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3462 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3463 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3464 return exp;
3465
3466 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3467 }
3468 else if (code == COMPONENT_REF)
3469 {
3470 tree inner;
3471
3472 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3473 and it is the right field, replace it with R. */
3474 for (inner = TREE_OPERAND (exp, 0);
3475 REFERENCE_CLASS_P (inner);
3476 inner = TREE_OPERAND (inner, 0))
3477 ;
3478
3479 /* The field. */
3480 op1 = TREE_OPERAND (exp, 1);
3481
3482 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3483 return r;
3484
3485       /* If this expression hasn't been completed yet, leave it alone.  */
3486 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3487 return exp;
3488
3489 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3490 if (op0 == TREE_OPERAND (exp, 0))
3491 return exp;
3492
3493 new_tree
3494 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3495 }
3496 else
3497 switch (TREE_CODE_CLASS (code))
3498 {
3499 case tcc_constant:
3500 return exp;
3501
3502 case tcc_declaration:
3503 if (exp == f)
3504 return r;
3505 else
3506 return exp;
3507
3508 case tcc_expression:
3509 if (exp == f)
3510 return r;
3511
3512 /* Fall through... */
3513
3514 case tcc_exceptional:
3515 case tcc_unary:
3516 case tcc_binary:
3517 case tcc_comparison:
3518 case tcc_reference:
3519 switch (TREE_CODE_LENGTH (code))
3520 {
3521 case 0:
3522 return exp;
3523
3524 case 1:
3525 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3526 if (op0 == TREE_OPERAND (exp, 0))
3527 return exp;
3528
3529 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3530 break;
3531
3532 case 2:
3533 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3534 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3535
3536 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3537 return exp;
3538
3539 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3540 break;
3541
3542 case 3:
3543 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3544 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3545 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3546
3547 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3548 && op2 == TREE_OPERAND (exp, 2))
3549 return exp;
3550
3551 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3552 break;
3553
3554 case 4:
3555 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3556 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3557 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3558 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3559
3560 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3561 && op2 == TREE_OPERAND (exp, 2)
3562 && op3 == TREE_OPERAND (exp, 3))
3563 return exp;
3564
3565 new_tree
3566 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3567 break;
3568
3569 default:
3570 gcc_unreachable ();
3571 }
3572 break;
3573
3574 case tcc_vl_exp:
3575 {
3576 int i;
3577
3578 new_tree = NULL_TREE;
3579
3580 /* If we are trying to replace F with a constant, inline back
3581 functions which do nothing else than computing a value from
3582 the arguments they are passed. This makes it possible to
3583 fold partially or entirely the replacement expression. */
3584 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3585 {
3586 tree t = maybe_inline_call_in_expr (exp);
3587 if (t)
3588 return SUBSTITUTE_IN_EXPR (t, f, r);
3589 }
3590
3591 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3592 {
3593 tree op = TREE_OPERAND (exp, i);
3594 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3595 if (new_op != op)
3596 {
3597 if (!new_tree)
3598 new_tree = copy_node (exp);
3599 TREE_OPERAND (new_tree, i) = new_op;
3600 }
3601 }
3602
3603 if (new_tree)
3604 {
3605 new_tree = fold (new_tree);
3606 if (TREE_CODE (new_tree) == CALL_EXPR)
3607 process_call_operands (new_tree);
3608 }
3609 else
3610 return exp;
3611 }
3612 break;
3613
3614 default:
3615 gcc_unreachable ();
3616 }
3617
3618 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3619
3620 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3621 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3622
3623 return new_tree;
3624 }
3625
3626 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3627 for it within OBJ, a tree that is an object or a chain of references. */
3628
3629 tree
3630 substitute_placeholder_in_expr (tree exp, tree obj)
3631 {
3632 enum tree_code code = TREE_CODE (exp);
3633 tree op0, op1, op2, op3;
3634 tree new_tree;
3635
3636 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3637 in the chain of OBJ. */
3638 if (code == PLACEHOLDER_EXPR)
3639 {
3640 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3641 tree elt;
3642
3643 for (elt = obj; elt != 0;
3644 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3645 || TREE_CODE (elt) == COND_EXPR)
3646 ? TREE_OPERAND (elt, 1)
3647 : (REFERENCE_CLASS_P (elt)
3648 || UNARY_CLASS_P (elt)
3649 || BINARY_CLASS_P (elt)
3650 || VL_EXP_CLASS_P (elt)
3651 || EXPRESSION_CLASS_P (elt))
3652 ? TREE_OPERAND (elt, 0) : 0))
3653 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3654 return elt;
3655
3656 for (elt = obj; elt != 0;
3657 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3658 || TREE_CODE (elt) == COND_EXPR)
3659 ? TREE_OPERAND (elt, 1)
3660 : (REFERENCE_CLASS_P (elt)
3661 || UNARY_CLASS_P (elt)
3662 || BINARY_CLASS_P (elt)
3663 || VL_EXP_CLASS_P (elt)
3664 || EXPRESSION_CLASS_P (elt))
3665 ? TREE_OPERAND (elt, 0) : 0))
3666 if (POINTER_TYPE_P (TREE_TYPE (elt))
3667 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3668 == need_type))
3669 return fold_build1 (INDIRECT_REF, need_type, elt);
3670
3671 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3672 survives until RTL generation, there will be an error. */
3673 return exp;
3674 }
3675
3676 /* TREE_LIST is special because we need to look at TREE_VALUE
3677 and TREE_CHAIN, not TREE_OPERANDS. */
3678 else if (code == TREE_LIST)
3679 {
3680 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3681 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3682 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3683 return exp;
3684
3685 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3686 }
3687 else
3688 switch (TREE_CODE_CLASS (code))
3689 {
3690 case tcc_constant:
3691 case tcc_declaration:
3692 return exp;
3693
3694 case tcc_exceptional:
3695 case tcc_unary:
3696 case tcc_binary:
3697 case tcc_comparison:
3698 case tcc_expression:
3699 case tcc_reference:
3700 case tcc_statement:
3701 switch (TREE_CODE_LENGTH (code))
3702 {
3703 case 0:
3704 return exp;
3705
3706 case 1:
3707 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3708 if (op0 == TREE_OPERAND (exp, 0))
3709 return exp;
3710
3711 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3712 break;
3713
3714 case 2:
3715 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3716 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3717
3718 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3719 return exp;
3720
3721 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3722 break;
3723
3724 case 3:
3725 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3726 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3727 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3728
3729 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3730 && op2 == TREE_OPERAND (exp, 2))
3731 return exp;
3732
3733 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3734 break;
3735
3736 case 4:
3737 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3738 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3739 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3740 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3741
3742 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3743 && op2 == TREE_OPERAND (exp, 2)
3744 && op3 == TREE_OPERAND (exp, 3))
3745 return exp;
3746
3747 new_tree
3748 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3749 break;
3750
3751 default:
3752 gcc_unreachable ();
3753 }
3754 break;
3755
3756 case tcc_vl_exp:
3757 {
3758 int i;
3759
3760 new_tree = NULL_TREE;
3761
3762 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3763 {
3764 tree op = TREE_OPERAND (exp, i);
3765 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3766 if (new_op != op)
3767 {
3768 if (!new_tree)
3769 new_tree = copy_node (exp);
3770 TREE_OPERAND (new_tree, i) = new_op;
3771 }
3772 }
3773
3774 if (new_tree)
3775 {
3776 new_tree = fold (new_tree);
3777 if (TREE_CODE (new_tree) == CALL_EXPR)
3778 process_call_operands (new_tree);
3779 }
3780 else
3781 return exp;
3782 }
3783 break;
3784
3785 default:
3786 gcc_unreachable ();
3787 }
3788
3789 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3790
3791 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3792 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3793
3794 return new_tree;
3795 }
3796 \f
3797
3798 /* Subroutine of stabilize_reference; this is called for subtrees of
3799 references. Any expression with side-effects must be put in a SAVE_EXPR
3800 to ensure that it is only evaluated once.
3801
3802 We don't put SAVE_EXPR nodes around everything, because assigning very
3803 simple expressions to temporaries causes us to miss good opportunities
3804 for optimizations. Among other things, the opportunity to fold in the
3805 addition of a constant into an addressing mode often gets lost, e.g.
3806 "y[i+1] += x;". In general, we take the approach that we should not make
3807 an assignment unless we are forced into it - i.e., that any non-side effect
3808 operator should be allowed, and that cse should take care of coalescing
3809 multiple utterances of the same expression should that prove fruitful. */
3810
3811 static tree
3812 stabilize_reference_1 (tree e)
3813 {
3814 tree result;
3815 enum tree_code code = TREE_CODE (e);
3816
3817 /* We cannot ignore const expressions because it might be a reference
3818 to a const array but whose index contains side-effects. But we can
3819 ignore things that are actual constant or that already have been
3820 handled by this function. */
3821
3822 if (tree_invariant_p (e))
3823 return e;
3824
3825 switch (TREE_CODE_CLASS (code))
3826 {
3827 case tcc_exceptional:
3828 case tcc_type:
3829 case tcc_declaration:
3830 case tcc_comparison:
3831 case tcc_statement:
3832 case tcc_expression:
3833 case tcc_reference:
3834 case tcc_vl_exp:
3835 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3836 so that it will only be evaluated once. */
3837 /* The reference (r) and comparison (<) classes could be handled as
3838 below, but it is generally faster to only evaluate them once. */
3839 if (TREE_SIDE_EFFECTS (e))
3840 return save_expr (e);
3841 return e;
3842
3843 case tcc_constant:
3844 /* Constants need no processing. In fact, we should never reach
3845 here. */
3846 return e;
3847
3848 case tcc_binary:
3849 /* Division is slow and tends to be compiled with jumps,
3850 especially the division by powers of 2 that is often
3851 found inside of an array reference. So do it just once. */
3852 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3853 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3854 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3855 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3856 return save_expr (e);
3857 /* Recursively stabilize each operand. */
3858 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3859 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3860 break;
3861
3862 case tcc_unary:
3863 /* Recursively stabilize each operand. */
3864 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3865 break;
3866
3867 default:
3868 gcc_unreachable ();
3869 }
3870
3871 TREE_TYPE (result) = TREE_TYPE (e);
3872 TREE_READONLY (result) = TREE_READONLY (e);
3873 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3874 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3875
3876 return result;
3877 }
3878
3879 /* Stabilize a reference so that we can use it any number of times
3880 without causing its operands to be evaluated more than once.
3881 Returns the stabilized reference. This works by means of save_expr,
3882 so see the caveats in the comments about save_expr.
3883
3884 Also allows conversion expressions whose operands are references.
3885 Any other kind of expression is returned unchanged. */
3886
3887 tree
3888 stabilize_reference (tree ref)
3889 {
3890 tree result;
3891 enum tree_code code = TREE_CODE (ref);
3892
3893 switch (code)
3894 {
3895 case VAR_DECL:
3896 case PARM_DECL:
3897 case RESULT_DECL:
3898 /* No action is needed in this case. */
3899 return ref;
3900
3901 CASE_CONVERT:
3902 case FLOAT_EXPR:
3903 case FIX_TRUNC_EXPR:
3904 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
3905 break;
3906
3907 case INDIRECT_REF:
3908 result = build_nt (INDIRECT_REF,
3909 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
3910 break;
3911
3912 case COMPONENT_REF:
3913 result = build_nt (COMPONENT_REF,
3914 stabilize_reference (TREE_OPERAND (ref, 0)),
3915 TREE_OPERAND (ref, 1), NULL_TREE);
3916 break;
3917
3918 case BIT_FIELD_REF:
3919 result = build_nt (BIT_FIELD_REF,
3920 stabilize_reference (TREE_OPERAND (ref, 0)),
3921 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3922 break;
3923
3924 case ARRAY_REF:
3925 result = build_nt (ARRAY_REF,
3926 stabilize_reference (TREE_OPERAND (ref, 0)),
3927 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3928 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3929 break;
3930
3931 case ARRAY_RANGE_REF:
3932 result = build_nt (ARRAY_RANGE_REF,
3933 stabilize_reference (TREE_OPERAND (ref, 0)),
3934 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3935 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3936 break;
3937
3938 case COMPOUND_EXPR:
3939 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3940 it wouldn't be ignored. This matters when dealing with
3941 volatiles. */
3942 return stabilize_reference_1 (ref);
3943
3944 /* If arg isn't a kind of lvalue we recognize, make no change.
3945 Caller should recognize the error for an invalid lvalue. */
3946 default:
3947 return ref;
3948
3949 case ERROR_MARK:
3950 return error_mark_node;
3951 }
3952
3953 TREE_TYPE (result) = TREE_TYPE (ref);
3954 TREE_READONLY (result) = TREE_READONLY (ref);
3955 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
3956 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3957
3958 return result;
3959 }
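
/* Illustrative sketch (hypothetical caller): when compiling "a[i++] += 1"
   a front end would stabilize the left-hand side first,

       tree lhs = stabilize_reference (ref);

   so that building both the read and the write from LHS evaluates the
   side-effecting index expression only once.  */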
3960 \f
3961 /* Low-level constructors for expressions. */
3962
3963 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
3964 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
3965
3966 void
3967 recompute_tree_invariant_for_addr_expr (tree t)
3968 {
3969 tree node;
3970 bool tc = true, se = false;
3971
3972 /* We started out assuming this address is both invariant and constant, but
3973 does not have side effects. Now go down any handled components and see if
3974 any of them involve offsets that are either non-constant or non-invariant.
3975 Also check for side-effects.
3976
3977 ??? Note that this code makes no attempt to deal with the case where
3978 taking the address of something causes a copy due to misalignment. */
3979
3980 #define UPDATE_FLAGS(NODE) \
3981 do { tree _node = (NODE); \
3982 if (_node && !TREE_CONSTANT (_node)) tc = false; \
3983 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
3984
3985 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
3986 node = TREE_OPERAND (node, 0))
3987 {
3988 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
3989 array reference (probably made temporarily by the G++ front end),
3990 so ignore all the operands. */
3991 if ((TREE_CODE (node) == ARRAY_REF
3992 || TREE_CODE (node) == ARRAY_RANGE_REF)
3993 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
3994 {
3995 UPDATE_FLAGS (TREE_OPERAND (node, 1));
3996 if (TREE_OPERAND (node, 2))
3997 UPDATE_FLAGS (TREE_OPERAND (node, 2));
3998 if (TREE_OPERAND (node, 3))
3999 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4000 }
4001 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4002 FIELD_DECL, apparently. The G++ front end can put something else
4003 there, at least temporarily. */
4004 else if (TREE_CODE (node) == COMPONENT_REF
4005 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4006 {
4007 if (TREE_OPERAND (node, 2))
4008 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4009 }
4010 }
4011
4012 node = lang_hooks.expr_to_decl (node, &tc, &se);
4013
4014 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4015 the address, since &(*a)->b is a form of addition. If it's a constant, the
4016 address is constant too. If it's a decl, its address is constant if the
4017 decl is static. Everything else is not constant and, furthermore,
4018 taking the address of a volatile variable is not volatile. */
4019 if (TREE_CODE (node) == INDIRECT_REF
4020 || TREE_CODE (node) == MEM_REF)
4021 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4022 else if (CONSTANT_CLASS_P (node))
4023 ;
4024 else if (DECL_P (node))
4025 tc &= (staticp (node) != NULL_TREE);
4026 else
4027 {
4028 tc = false;
4029 se |= TREE_SIDE_EFFECTS (node);
4030 }
4031
4032
4033 TREE_CONSTANT (t) = tc;
4034 TREE_SIDE_EFFECTS (t) = se;
4035 #undef UPDATE_FLAGS
4036 }
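/* Illustrative sketch, not part of the original sources: the effect of
   recompute_tree_invariant_for_addr_expr on a freshly built ADDR_EXPR,
   assuming GLOBAL_VAR is a file-scope VAR_DECL and LOCAL_VAR an
   automatic one:

	tree addr = build1 (ADDR_EXPR,
			    build_pointer_type (TREE_TYPE (global_var)),
			    global_var);
	  => TREE_CONSTANT (addr) is 1, since staticp (global_var) holds.

	tree addr2 = build1 (ADDR_EXPR,
			     build_pointer_type (TREE_TYPE (local_var)),
			     local_var);
	  => TREE_CONSTANT (addr2) is 0; the address of an automatic
	     variable is not an invariant.

   build1 calls this function itself for ADDR_EXPR (see below), so the
   flags are normally kept up to date without an explicit call.  */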
4037
4038 /* Build an expression of code CODE, data type TYPE, and operands as
4039 specified. Expressions and reference nodes can be created this way.
4040 Constants, decls, types and misc nodes cannot be.
4041
4042 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4043 enough for all extant tree codes. */
4044
4045 tree
4046 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4047 {
4048 tree t;
4049
4050 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4051
4052 t = make_node_stat (code PASS_MEM_STAT);
4053 TREE_TYPE (t) = tt;
4054
4055 return t;
4056 }
4057
4058 tree
4059 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4060 {
4061 int length = sizeof (struct tree_exp);
4062 tree t;
4063
4064 record_node_allocation_statistics (code, length);
4065
4066 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4067
4068 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4069
4070 memset (t, 0, sizeof (struct tree_common));
4071
4072 TREE_SET_CODE (t, code);
4073
4074 TREE_TYPE (t) = type;
4075 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4076 TREE_OPERAND (t, 0) = node;
4077 if (node && !TYPE_P (node))
4078 {
4079 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4080 TREE_READONLY (t) = TREE_READONLY (node);
4081 }
4082
4083 if (TREE_CODE_CLASS (code) == tcc_statement)
4084 TREE_SIDE_EFFECTS (t) = 1;
4085 else switch (code)
4086 {
4087 case VA_ARG_EXPR:
4088 /* All of these have side-effects, no matter what their
4089 operands are. */
4090 TREE_SIDE_EFFECTS (t) = 1;
4091 TREE_READONLY (t) = 0;
4092 break;
4093
4094 case INDIRECT_REF:
4095 /* Whether a dereference is readonly has nothing to do with whether
4096 its operand is readonly. */
4097 TREE_READONLY (t) = 0;
4098 break;
4099
4100 case ADDR_EXPR:
4101 if (node)
4102 recompute_tree_invariant_for_addr_expr (t);
4103 break;
4104
4105 default:
4106 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4107 && node && !TYPE_P (node)
4108 && TREE_CONSTANT (node))
4109 TREE_CONSTANT (t) = 1;
4110 if (TREE_CODE_CLASS (code) == tcc_reference
4111 && node && TREE_THIS_VOLATILE (node))
4112 TREE_THIS_VOLATILE (t) = 1;
4113 break;
4114 }
4115
4116 return t;
4117 }
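/* Usage sketch (illustrative only): build1 derives flags from its
   operand.  Assuming nothing beyond what is defined here:

	tree cst = build_int_cst (integer_type_node, 42);
	tree neg = build1 (NEGATE_EXPR, integer_type_node, cst);

   NEGATE_EXPR is a tcc_unary code and CST is TREE_CONSTANT, so the
   default case above sets TREE_CONSTANT (neg) as well.  */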
4118
4119 #define PROCESS_ARG(N) \
4120 do { \
4121 TREE_OPERAND (t, N) = arg##N; \
4122 if (arg##N &&!TYPE_P (arg##N)) \
4123 { \
4124 if (TREE_SIDE_EFFECTS (arg##N)) \
4125 side_effects = 1; \
4126 if (!TREE_READONLY (arg##N) \
4127 && !CONSTANT_CLASS_P (arg##N)) \
4128 (void) (read_only = 0); \
4129 if (!TREE_CONSTANT (arg##N)) \
4130 (void) (constant = 0); \
4131 } \
4132 } while (0)
4133
4134 tree
4135 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4136 {
4137 bool constant, read_only, side_effects;
4138 tree t;
4139
4140 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4141
4142 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4143 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4144 /* When sizetype precision doesn't match that of pointers
4145 we need to be able to build explicit extensions or truncations
4146 of the offset argument. */
4147 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4148 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4149 && TREE_CODE (arg1) == INTEGER_CST);
4150
4151 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4152 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4153 && ptrofftype_p (TREE_TYPE (arg1)));
4154
4155 t = make_node_stat (code PASS_MEM_STAT);
4156 TREE_TYPE (t) = tt;
4157
4158 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4159 result based on those same flags for the arguments. But if the
4160 arguments aren't really even `tree' expressions, we shouldn't be trying
4161 to do this. */
4162
4163 /* Expressions without side effects may be constant if their
4164 arguments are as well. */
4165 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4166 || TREE_CODE_CLASS (code) == tcc_binary);
4167 read_only = 1;
4168 side_effects = TREE_SIDE_EFFECTS (t);
4169
4170 PROCESS_ARG (0);
4171 PROCESS_ARG (1);
4172
4173 TREE_READONLY (t) = read_only;
4174 TREE_CONSTANT (t) = constant;
4175 TREE_SIDE_EFFECTS (t) = side_effects;
4176 TREE_THIS_VOLATILE (t)
4177 = (TREE_CODE_CLASS (code) == tcc_reference
4178 && arg0 && TREE_THIS_VOLATILE (arg0));
4179
4180 return t;
4181 }
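/* Usage sketch (illustrative only): pointer arithmetic at this level is
   expressed with POINTER_PLUS_EXPR and a sizetype offset, matching the
   assertion above.  Assuming PTR is an expression of pointer type:

	tree off = size_int (4);	(a sizetype constant, so ptrofftype_p)
	tree adj = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   A PLUS_EXPR with a pointer-typed result, by contrast, is only asserted
   valid here when both operands are INTEGER_CSTs.  */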
4182
4183
4184 tree
4185 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4186 tree arg2 MEM_STAT_DECL)
4187 {
4188 bool constant, read_only, side_effects;
4189 tree t;
4190
4191 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4192 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4193
4194 t = make_node_stat (code PASS_MEM_STAT);
4195 TREE_TYPE (t) = tt;
4196
4197 read_only = 1;
4198
4199 /* As a special exception, if COND_EXPR has NULL branches, we
4200 assume that it is a gimple statement and always consider
4201 it to have side effects. */
4202 if (code == COND_EXPR
4203 && tt == void_type_node
4204 && arg1 == NULL_TREE
4205 && arg2 == NULL_TREE)
4206 side_effects = true;
4207 else
4208 side_effects = TREE_SIDE_EFFECTS (t);
4209
4210 PROCESS_ARG (0);
4211 PROCESS_ARG (1);
4212 PROCESS_ARG (2);
4213
4214 if (code == COND_EXPR)
4215 TREE_READONLY (t) = read_only;
4216
4217 TREE_SIDE_EFFECTS (t) = side_effects;
4218 TREE_THIS_VOLATILE (t)
4219 = (TREE_CODE_CLASS (code) == tcc_reference
4220 && arg0 && TREE_THIS_VOLATILE (arg0));
4221
4222 return t;
4223 }
4224
4225 tree
4226 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4227 tree arg2, tree arg3 MEM_STAT_DECL)
4228 {
4229 bool constant, read_only, side_effects;
4230 tree t;
4231
4232 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4233
4234 t = make_node_stat (code PASS_MEM_STAT);
4235 TREE_TYPE (t) = tt;
4236
4237 side_effects = TREE_SIDE_EFFECTS (t);
4238
4239 PROCESS_ARG (0);
4240 PROCESS_ARG (1);
4241 PROCESS_ARG (2);
4242 PROCESS_ARG (3);
4243
4244 TREE_SIDE_EFFECTS (t) = side_effects;
4245 TREE_THIS_VOLATILE (t)
4246 = (TREE_CODE_CLASS (code) == tcc_reference
4247 && arg0 && TREE_THIS_VOLATILE (arg0));
4248
4249 return t;
4250 }
4251
4252 tree
4253 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4254 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4255 {
4256 bool constant, read_only, side_effects;
4257 tree t;
4258
4259 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4260
4261 t = make_node_stat (code PASS_MEM_STAT);
4262 TREE_TYPE (t) = tt;
4263
4264 side_effects = TREE_SIDE_EFFECTS (t);
4265
4266 PROCESS_ARG (0);
4267 PROCESS_ARG (1);
4268 PROCESS_ARG (2);
4269 PROCESS_ARG (3);
4270 PROCESS_ARG (4);
4271
4272 TREE_SIDE_EFFECTS (t) = side_effects;
4273 TREE_THIS_VOLATILE (t)
4274 = (TREE_CODE_CLASS (code) == tcc_reference
4275 && arg0 && TREE_THIS_VOLATILE (arg0));
4276
4277 return t;
4278 }
4279
4280 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4281 on the pointer PTR. */
4282
4283 tree
4284 build_simple_mem_ref_loc (location_t loc, tree ptr)
4285 {
4286 HOST_WIDE_INT offset = 0;
4287 tree ptype = TREE_TYPE (ptr);
4288 tree tem;
4289 /* For convenience allow addresses that collapse to a simple base
4290 and offset. */
4291 if (TREE_CODE (ptr) == ADDR_EXPR
4292 && (handled_component_p (TREE_OPERAND (ptr, 0))
4293 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4294 {
4295 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4296 gcc_assert (ptr);
4297 ptr = build_fold_addr_expr (ptr);
4298 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4299 }
4300 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4301 ptr, build_int_cst (ptype, offset));
4302 SET_EXPR_LOCATION (tem, loc);
4303 return tem;
4304 }
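/* Illustrative sketch (assumptions, not from the original sources):
   assuming S is a VAR_DECL of a record type and F one of its FIELD_DECLs
   at a constant position:

	tree comp = build3 (COMPONENT_REF, TREE_TYPE (f), s, f, NULL_TREE);
	tree mem = build_simple_mem_ref (build_fold_addr_expr (comp));

   The &s.f form is collapsed back, via get_addr_base_and_unit_offset,
   into a MEM_REF of &s with the field's constant byte offset.  */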
4305
4306 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4307
4308 double_int
4309 mem_ref_offset (const_tree t)
4310 {
4311 tree toff = TREE_OPERAND (t, 1);
4312 return tree_to_double_int (toff).sext (TYPE_PRECISION (TREE_TYPE (toff)));
4313 }
4314
4315 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4316 offsetted by OFFSET units. */
4317
4318 tree
4319 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4320 {
4321 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4322 build_fold_addr_expr (base),
4323 build_int_cst (ptr_type_node, offset));
4324 tree addr = build1 (ADDR_EXPR, type, ref);
4325 recompute_tree_invariant_for_addr_expr (addr);
4326 return addr;
4327 }
4328
4329 /* Similar except don't specify the TREE_TYPE
4330 and leave the TREE_SIDE_EFFECTS as 0.
4331 It is permissible for arguments to be null,
4332 or even garbage if their values do not matter. */
4333
4334 tree
4335 build_nt (enum tree_code code, ...)
4336 {
4337 tree t;
4338 int length;
4339 int i;
4340 va_list p;
4341
4342 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4343
4344 va_start (p, code);
4345
4346 t = make_node (code);
4347 length = TREE_CODE_LENGTH (code);
4348
4349 for (i = 0; i < length; i++)
4350 TREE_OPERAND (t, i) = va_arg (p, tree);
4351
4352 va_end (p);
4353 return t;
4354 }
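/* Usage sketch (illustrative only): build_nt is used by
   stabilize_reference above and by front ends that need a node with no
   type or derived flags.  For a COMPONENT_REF (three operands), assuming
   OBJ and FIELD are already at hand:

	tree ref = build_nt (COMPONENT_REF, obj, field, NULL_TREE);

   Unlike build3, no TREE_TYPE is assigned and nothing is derived from
   the operands; the caller fills those in as needed.  */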
4355
4356 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4357 tree vec. */
4358
4359 tree
4360 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4361 {
4362 tree ret, t;
4363 unsigned int ix;
4364
4365 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4366 CALL_EXPR_FN (ret) = fn;
4367 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4368 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4369 CALL_EXPR_ARG (ret, ix) = t;
4370 return ret;
4371 }
4372 \f
4373 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4374 We do NOT enter this node in any sort of symbol table.
4375
4376 LOC is the location of the decl.
4377
4378 layout_decl is used to set up the decl's storage layout.
4379 Other slots are initialized to 0 or null pointers. */
4380
4381 tree
4382 build_decl_stat (location_t loc, enum tree_code code, tree name,
4383 tree type MEM_STAT_DECL)
4384 {
4385 tree t;
4386
4387 t = make_node_stat (code PASS_MEM_STAT);
4388 DECL_SOURCE_LOCATION (t) = loc;
4389
4390 /* if (type == error_mark_node)
4391 type = integer_type_node; */
4392 /* That is not done, deliberately, so that having error_mark_node
4393 as the type can suppress useless errors in the use of this variable. */
4394
4395 DECL_NAME (t) = name;
4396 TREE_TYPE (t) = type;
4397
4398 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4399 layout_decl (t, 0);
4400
4401 return t;
4402 }
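/* Usage sketch (illustrative; the variable name is hypothetical):
   creating an artificial temporary declaration:

	tree tmp = build_decl (input_location, VAR_DECL,
			       get_identifier ("__illustrative_tmp"),
			       integer_type_node);
	DECL_ARTIFICIAL (tmp) = 1;

   Because the code is VAR_DECL, layout_decl has already filled in
   DECL_SIZE and DECL_ALIGN by the time build_decl returns.  */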
4403
4404 /* Builds and returns function declaration with NAME and TYPE. */
4405
4406 tree
4407 build_fn_decl (const char *name, tree type)
4408 {
4409 tree id = get_identifier (name);
4410 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4411
4412 DECL_EXTERNAL (decl) = 1;
4413 TREE_PUBLIC (decl) = 1;
4414 DECL_ARTIFICIAL (decl) = 1;
4415 TREE_NOTHROW (decl) = 1;
4416
4417 return decl;
4418 }
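/* Usage sketch (illustrative; the helper name is hypothetical): this is
   how library helpers called from generated code are declared:

	tree fntype = build_function_type_list (void_type_node, NULL_TREE);
	tree decl = build_fn_decl ("__illustrative_helper", fntype);

   The resulting FUNCTION_DECL is external, public, artificial and
   nothrow, as set above.  */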
4419
4420 vec<tree, va_gc> *all_translation_units;
4421
4422 /* Builds a new translation-unit decl with name NAME, queues it in the
4423 global list of translation-unit decls and returns it. */
4424
4425 tree
4426 build_translation_unit_decl (tree name)
4427 {
4428 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4429 name, NULL_TREE);
4430 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4431 vec_safe_push (all_translation_units, tu);
4432 return tu;
4433 }
4434
4435 \f
4436 /* BLOCK nodes are used to represent the structure of binding contours
4437 and declarations, once those contours have been exited and their contents
4438 compiled. This information is used for outputting debugging info. */
4439
4440 tree
4441 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4442 {
4443 tree block = make_node (BLOCK);
4444
4445 BLOCK_VARS (block) = vars;
4446 BLOCK_SUBBLOCKS (block) = subblocks;
4447 BLOCK_SUPERCONTEXT (block) = supercontext;
4448 BLOCK_CHAIN (block) = chain;
4449 return block;
4450 }
4451
4452 \f
4453 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4454
4455 LOC is the location to use in tree T. */
4456
4457 void
4458 protected_set_expr_location (tree t, location_t loc)
4459 {
4460 if (t && CAN_HAVE_LOCATION_P (t))
4461 SET_EXPR_LOCATION (t, loc);
4462 }
4463 \f
4464 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4465 is ATTRIBUTE. */
4466
4467 tree
4468 build_decl_attribute_variant (tree ddecl, tree attribute)
4469 {
4470 DECL_ATTRIBUTES (ddecl) = attribute;
4471 return ddecl;
4472 }
4473
4474 /* Borrowed from hashtab.c iterative_hash implementation. */
4475 #define mix(a,b,c) \
4476 { \
4477 a -= b; a -= c; a ^= (c>>13); \
4478 b -= c; b -= a; b ^= (a<< 8); \
4479 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4480 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4481 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4482 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4483 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4484 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4485 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4486 }
4487
4488
4489 /* Produce good hash value combining VAL and VAL2. */
4490 hashval_t
4491 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4492 {
4493 /* the golden ratio; an arbitrary value. */
4494 hashval_t a = 0x9e3779b9;
4495
4496 mix (a, val, val2);
4497 return val2;
4498 }
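/* Usage sketch (illustrative only): hash values are combined by
   chaining, each call mixing a new value into the running hash and
   returning the update:

	hashval_t h = 0;
	h = iterative_hash_hashval_t (TYPE_HASH (type1), h);
	h = iterative_hash_hashval_t (TYPE_HASH (type2), h);

   build_type_attribute_qual_variant below does the same through the
   iterative_hash_object wrapper.  */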
4499
4500 /* Produce good hash value combining VAL and VAL2. */
4501 hashval_t
4502 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4503 {
4504 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4505 return iterative_hash_hashval_t (val, val2);
4506 else
4507 {
4508 hashval_t a = (hashval_t) val;
4509 /* Avoid warnings about shifting of more than the width of the type on
4510 hosts that won't execute this path. */
4511 int zero = 0;
4512 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4513 mix (a, b, val2);
4514 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4515 {
4516 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4517 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4518 mix (a, b, val2);
4519 }
4520 return val2;
4521 }
4522 }
4523
4524 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4525 is ATTRIBUTE and its qualifiers are QUALS.
4526
4527 Record such modified types already made so we don't make duplicates. */
4528
4529 tree
4530 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4531 {
4532 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4533 {
4534 hashval_t hashcode = 0;
4535 tree ntype;
4536 enum tree_code code = TREE_CODE (ttype);
4537
4538 /* Building a distinct copy of a tagged type is inappropriate; it
4539 causes breakage in code that expects there to be a one-to-one
4540 relationship between a struct and its fields.
4541 build_duplicate_type is another solution (as used in
4542 handle_transparent_union_attribute), but that doesn't play well
4543 with the stronger C++ type identity model. */
4544 if (TREE_CODE (ttype) == RECORD_TYPE
4545 || TREE_CODE (ttype) == UNION_TYPE
4546 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4547 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4548 {
4549 warning (OPT_Wattributes,
4550 "ignoring attributes applied to %qT after definition",
4551 TYPE_MAIN_VARIANT (ttype));
4552 return build_qualified_type (ttype, quals);
4553 }
4554
4555 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4556 ntype = build_distinct_type_copy (ttype);
4557
4558 TYPE_ATTRIBUTES (ntype) = attribute;
4559
4560 hashcode = iterative_hash_object (code, hashcode);
4561 if (TREE_TYPE (ntype))
4562 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4563 hashcode);
4564 hashcode = attribute_hash_list (attribute, hashcode);
4565
4566 switch (TREE_CODE (ntype))
4567 {
4568 case FUNCTION_TYPE:
4569 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4570 break;
4571 case ARRAY_TYPE:
4572 if (TYPE_DOMAIN (ntype))
4573 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4574 hashcode);
4575 break;
4576 case INTEGER_TYPE:
4577 hashcode = iterative_hash_object
4578 (TREE_INT_CST_LOW (TYPE_MAX_VALUE (ntype)), hashcode);
4579 hashcode = iterative_hash_object
4580 (TREE_INT_CST_HIGH (TYPE_MAX_VALUE (ntype)), hashcode);
4581 break;
4582 case REAL_TYPE:
4583 case FIXED_POINT_TYPE:
4584 {
4585 unsigned int precision = TYPE_PRECISION (ntype);
4586 hashcode = iterative_hash_object (precision, hashcode);
4587 }
4588 break;
4589 default:
4590 break;
4591 }
4592
4593 ntype = type_hash_canon (hashcode, ntype);
4594
4595 /* If the target-dependent attributes make NTYPE different from
4596 its canonical type, we will need to use structural equality
4597 checks for this type. */
4598 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4599 || !comp_type_attributes (ntype, ttype))
4600 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4601 else if (TYPE_CANONICAL (ntype) == ntype)
4602 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4603
4604 ttype = build_qualified_type (ntype, quals);
4605 }
4606 else if (TYPE_QUALS (ttype) != quals)
4607 ttype = build_qualified_type (ttype, quals);
4608
4609 return ttype;
4610 }
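/* Usage sketch (illustrative only; the attribute list is built by hand
   here rather than coming from decl_attributes):

	tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
				NULL_TREE);
	tree var = build_type_attribute_qual_variant (char_type_node, attrs,
						      TYPE_QUAL_CONST);

   The result is a const-qualified variant of char_type_node carrying the
   attribute; an equivalent variant built earlier is reused through
   type_hash_canon.  */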
4611
4612 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4613 the same. */
4614
4615 static bool
4616 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4617 {
4618 tree cl1, cl2;
4619 for (cl1 = clauses1, cl2 = clauses2;
4620 cl1 && cl2;
4621 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4622 {
4623 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4624 return false;
4625 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4626 {
4627 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4628 OMP_CLAUSE_DECL (cl2)) != 1)
4629 return false;
4630 }
4631 switch (OMP_CLAUSE_CODE (cl1))
4632 {
4633 case OMP_CLAUSE_ALIGNED:
4634 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4635 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4636 return false;
4637 break;
4638 case OMP_CLAUSE_LINEAR:
4639 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4640 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4641 return false;
4642 break;
4643 case OMP_CLAUSE_SIMDLEN:
4644 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4645 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4646 return false;
4647 default:
4648 break;
4649 }
4650 }
4651 return true;
4652 }
4653
4654 /* Compare two constructor-element-type constants. Return true if the lists
4655 are known to be equal; otherwise return false. */
4656
4657 static bool
4658 simple_cst_list_equal (const_tree l1, const_tree l2)
4659 {
4660 while (l1 != NULL_TREE && l2 != NULL_TREE)
4661 {
4662 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4663 return false;
4664
4665 l1 = TREE_CHAIN (l1);
4666 l2 = TREE_CHAIN (l2);
4667 }
4668
4669 return l1 == l2;
4670 }
4671
4672 /* Compare two attributes for their value identity. Return true if the
4673 attribute values are known to be equal; otherwise return false.
4674 */
4675
4676 static bool
4677 attribute_value_equal (const_tree attr1, const_tree attr2)
4678 {
4679 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4680 return true;
4681
4682 if (TREE_VALUE (attr1) != NULL_TREE
4683 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4684 && TREE_VALUE (attr2) != NULL
4685 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4686 return (simple_cst_list_equal (TREE_VALUE (attr1),
4687 TREE_VALUE (attr2)) == 1);
4688
4689 if ((flag_openmp || flag_openmp_simd)
4690 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4691 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4692 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4693 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4694 TREE_VALUE (attr2));
4695
4696 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4697 }
4698
4699 /* Return 0 if the attributes for two types are incompatible, 1 if they
4700 are compatible, and 2 if they are nearly compatible (which causes a
4701 warning to be generated). */
4702 int
4703 comp_type_attributes (const_tree type1, const_tree type2)
4704 {
4705 const_tree a1 = TYPE_ATTRIBUTES (type1);
4706 const_tree a2 = TYPE_ATTRIBUTES (type2);
4707 const_tree a;
4708
4709 if (a1 == a2)
4710 return 1;
4711 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4712 {
4713 const struct attribute_spec *as;
4714 const_tree attr;
4715
4716 as = lookup_attribute_spec (get_attribute_name (a));
4717 if (!as || as->affects_type_identity == false)
4718 continue;
4719
4720 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4721 if (!attr || !attribute_value_equal (a, attr))
4722 break;
4723 }
4724 if (!a)
4725 {
4726 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4727 {
4728 const struct attribute_spec *as;
4729
4730 as = lookup_attribute_spec (get_attribute_name (a));
4731 if (!as || as->affects_type_identity == false)
4732 continue;
4733
4734 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4735 break;
4736 /* We don't need to compare trees again, as we did this
4737 already in first loop. */
4738 }
4739 /* All attributes affecting type identity are equal, so
4740 there is no need to call the target hook for comparison. */
4741 if (!a)
4742 return 1;
4743 }
4744 /* As some type combinations - like default calling-convention - might
4745 be compatible, we have to call the target hook to get the final result. */
4746 return targetm.comp_type_attributes (type1, type2);
4747 }
4748
4749 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4750 is ATTRIBUTE.
4751
4752 Record such modified types already made so we don't make duplicates. */
4753
4754 tree
4755 build_type_attribute_variant (tree ttype, tree attribute)
4756 {
4757 return build_type_attribute_qual_variant (ttype, attribute,
4758 TYPE_QUALS (ttype));
4759 }
4760
4761
4762 /* Reset the expression *EXPR_P, a size or position.
4763
4764 ??? We could reset all non-constant sizes or positions. But it's cheap
4765 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4766
4767 We need to reset self-referential sizes or positions because they cannot
4768 be gimplified and thus can contain a CALL_EXPR after the gimplification
4769 is finished, which will run afoul of LTO streaming. And they need to be
4770 reset to something essentially dummy but not constant, so as to preserve
4771 the properties of the object they are attached to. */
4772
4773 static inline void
4774 free_lang_data_in_one_sizepos (tree *expr_p)
4775 {
4776 tree expr = *expr_p;
4777 if (CONTAINS_PLACEHOLDER_P (expr))
4778 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4779 }
4780
4781
4782 /* Reset all the fields in a binfo node BINFO. We only keep
4783 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4784
4785 static void
4786 free_lang_data_in_binfo (tree binfo)
4787 {
4788 unsigned i;
4789 tree t;
4790
4791 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4792
4793 BINFO_VIRTUALS (binfo) = NULL_TREE;
4794 BINFO_BASE_ACCESSES (binfo) = NULL;
4795 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4796 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4797
4798 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4799 free_lang_data_in_binfo (t);
4800 }
4801
4802
4803 /* Reset all language specific information still present in TYPE. */
4804
4805 static void
4806 free_lang_data_in_type (tree type)
4807 {
4808 gcc_assert (TYPE_P (type));
4809
4810 /* Give the FE a chance to remove its own data first. */
4811 lang_hooks.free_lang_data (type);
4812
4813 TREE_LANG_FLAG_0 (type) = 0;
4814 TREE_LANG_FLAG_1 (type) = 0;
4815 TREE_LANG_FLAG_2 (type) = 0;
4816 TREE_LANG_FLAG_3 (type) = 0;
4817 TREE_LANG_FLAG_4 (type) = 0;
4818 TREE_LANG_FLAG_5 (type) = 0;
4819 TREE_LANG_FLAG_6 (type) = 0;
4820
4821 if (TREE_CODE (type) == FUNCTION_TYPE)
4822 {
4823 /* Remove the const and volatile qualifiers from arguments. The
4824 C++ front end removes them, but the C front end does not,
4825 leading to false ODR violation errors when merging two
4826 instances of the same function signature compiled by
4827 different front ends. */
4828 tree p;
4829
4830 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4831 {
4832 tree arg_type = TREE_VALUE (p);
4833
4834 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4835 {
4836 int quals = TYPE_QUALS (arg_type)
4837 & ~TYPE_QUAL_CONST
4838 & ~TYPE_QUAL_VOLATILE;
4839 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4840 free_lang_data_in_type (TREE_VALUE (p));
4841 }
4842 }
4843 }
4844
4845 /* Remove members that are not actually FIELD_DECLs from the field
4846 list of an aggregate. These occur in C++. */
4847 if (RECORD_OR_UNION_TYPE_P (type))
4848 {
4849 tree prev, member;
4850
4851 /* Note that TYPE_FIELDS can be shared across distinct
4852 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4853 to be removed, we cannot set its TREE_CHAIN to NULL.
4854 Otherwise, we would not be able to find all the other fields
4855 in the other instances of this TREE_TYPE.
4856
4857 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4858 prev = NULL_TREE;
4859 member = TYPE_FIELDS (type);
4860 while (member)
4861 {
4862 if (TREE_CODE (member) == FIELD_DECL
4863 || TREE_CODE (member) == TYPE_DECL)
4864 {
4865 if (prev)
4866 TREE_CHAIN (prev) = member;
4867 else
4868 TYPE_FIELDS (type) = member;
4869 prev = member;
4870 }
4871
4872 member = TREE_CHAIN (member);
4873 }
4874
4875 if (prev)
4876 TREE_CHAIN (prev) = NULL_TREE;
4877 else
4878 TYPE_FIELDS (type) = NULL_TREE;
4879
4880 TYPE_METHODS (type) = NULL_TREE;
4881 if (TYPE_BINFO (type))
4882 free_lang_data_in_binfo (TYPE_BINFO (type));
4883 }
4884 else
4885 {
4886 /* For non-aggregate types, clear out the language slot (which
4887 overloads TYPE_BINFO). */
4888 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4889
4890 if (INTEGRAL_TYPE_P (type)
4891 || SCALAR_FLOAT_TYPE_P (type)
4892 || FIXED_POINT_TYPE_P (type))
4893 {
4894 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4895 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4896 }
4897 }
4898
4899 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4900 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4901
4902 if (TYPE_CONTEXT (type)
4903 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4904 {
4905 tree ctx = TYPE_CONTEXT (type);
4906 do
4907 {
4908 ctx = BLOCK_SUPERCONTEXT (ctx);
4909 }
4910 while (ctx && TREE_CODE (ctx) == BLOCK);
4911 TYPE_CONTEXT (type) = ctx;
4912 }
4913 }
4914
4915
4916 /* Return true if DECL may need an assembler name to be set. */
4917
4918 static inline bool
4919 need_assembler_name_p (tree decl)
4920 {
4921 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4922 if (TREE_CODE (decl) != FUNCTION_DECL
4923 && TREE_CODE (decl) != VAR_DECL)
4924 return false;
4925
4926 /* If DECL already has its assembler name set, it does not need a
4927 new one. */
4928 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4929 || DECL_ASSEMBLER_NAME_SET_P (decl))
4930 return false;
4931
4932 /* Abstract decls do not need an assembler name. */
4933 if (DECL_ABSTRACT (decl))
4934 return false;
4935
4936 /* For VAR_DECLs, only static, public and external symbols need an
4937 assembler name. */
4938 if (TREE_CODE (decl) == VAR_DECL
4939 && !TREE_STATIC (decl)
4940 && !TREE_PUBLIC (decl)
4941 && !DECL_EXTERNAL (decl))
4942 return false;
4943
4944 if (TREE_CODE (decl) == FUNCTION_DECL)
4945 {
4946 /* Do not set assembler name on builtins. Allow RTL expansion to
4947 decide whether to expand inline or via a regular call. */
4948 if (DECL_BUILT_IN (decl)
4949 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
4950 return false;
4951
4952 /* Functions represented in the callgraph need an assembler name. */
4953 if (cgraph_get_node (decl) != NULL)
4954 return true;
4955
4956 /* Unused and not public functions don't need an assembler name. */
4957 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
4958 return false;
4959 }
4960
4961 return true;
4962 }
4963
4964
4965 /* Reset all language specific information still present in symbol
4966 DECL. */
4967
4968 static void
4969 free_lang_data_in_decl (tree decl)
4970 {
4971 gcc_assert (DECL_P (decl));
4972
4973 /* Give the FE a chance to remove its own data first. */
4974 lang_hooks.free_lang_data (decl);
4975
4976 TREE_LANG_FLAG_0 (decl) = 0;
4977 TREE_LANG_FLAG_1 (decl) = 0;
4978 TREE_LANG_FLAG_2 (decl) = 0;
4979 TREE_LANG_FLAG_3 (decl) = 0;
4980 TREE_LANG_FLAG_4 (decl) = 0;
4981 TREE_LANG_FLAG_5 (decl) = 0;
4982 TREE_LANG_FLAG_6 (decl) = 0;
4983
4984 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
4985 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
4986 if (TREE_CODE (decl) == FIELD_DECL)
4987 {
4988 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
4989 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
4990 DECL_QUALIFIER (decl) = NULL_TREE;
4991 }
4992
4993 if (TREE_CODE (decl) == FUNCTION_DECL)
4994 {
4995 struct cgraph_node *node;
4996 if (!(node = cgraph_get_node (decl))
4997 || (!node->definition && !node->clones))
4998 {
4999 if (node)
5000 cgraph_release_function_body (node);
5001 else
5002 {
5003 release_function_body (decl);
5004 DECL_ARGUMENTS (decl) = NULL;
5005 DECL_RESULT (decl) = NULL;
5006 DECL_INITIAL (decl) = error_mark_node;
5007 }
5008 }
5009 if (gimple_has_body_p (decl))
5010 {
5011 tree t;
5012
5013 /* If DECL has a gimple body, then the context for its
5014 arguments must be DECL. Otherwise, it doesn't really
5015 matter, as we will not be emitting any code for DECL. In
5016 general, there may be other instances of DECL created by
5017 the front end and since PARM_DECLs are generally shared,
5018 their DECL_CONTEXT changes as the replicas of DECL are
5019 created. The only time where DECL_CONTEXT is important
5020 is for the FUNCTION_DECLs that have a gimple body (since
5021 the PARM_DECL will be used in the function's body). */
5022 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5023 DECL_CONTEXT (t) = decl;
5024 }
5025
5026 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5027 At this point, it is not needed anymore. */
5028 DECL_SAVED_TREE (decl) = NULL_TREE;
5029
5030 /* Clear the abstract origin if it refers to a method. Otherwise
5031 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5032 origin will not be output correctly. */
5033 if (DECL_ABSTRACT_ORIGIN (decl)
5034 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5035 && RECORD_OR_UNION_TYPE_P
5036 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5037 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5038
5039 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5040 DECL_VINDEX referring to itself into a vtable slot number as it
5041 should. Happens with functions that are copied and then forgotten
5042 about. Just clear it, it won't matter anymore. */
5043 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5044 DECL_VINDEX (decl) = NULL_TREE;
5045 }
5046 else if (TREE_CODE (decl) == VAR_DECL)
5047 {
5048 if ((DECL_EXTERNAL (decl)
5049 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5050 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5051 DECL_INITIAL (decl) = NULL_TREE;
5052 }
5053 else if (TREE_CODE (decl) == TYPE_DECL
5054 || TREE_CODE (decl) == FIELD_DECL)
5055 DECL_INITIAL (decl) = NULL_TREE;
5056 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5057 && DECL_INITIAL (decl)
5058 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5059 {
5060 /* Strip builtins from the translation-unit BLOCK. We still have targets
5061 without builtin_decl_explicit support and also builtins are shared
5062 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5063 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5064 while (*nextp)
5065 {
5066 tree var = *nextp;
5067 if (TREE_CODE (var) == FUNCTION_DECL
5068 && DECL_BUILT_IN (var))
5069 *nextp = TREE_CHAIN (var);
5070 else
5071 nextp = &TREE_CHAIN (var);
5072 }
5073 }
5074 }
5075
5076
5077 /* Data used when collecting DECLs and TYPEs for language data removal. */
5078
5079 struct free_lang_data_d
5080 {
5081 /* Worklist to avoid excessive recursion. */
5082 vec<tree> worklist;
5083
5084 /* Set of traversed objects. Used to avoid duplicate visits. */
5085 struct pointer_set_t *pset;
5086
5087 /* Array of symbols to process with free_lang_data_in_decl. */
5088 vec<tree> decls;
5089
5090 /* Array of types to process with free_lang_data_in_type. */
5091 vec<tree> types;
5092 };
5093
5094
5095 /* Save all language fields needed to generate proper debug information
5096 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5097
5098 static void
5099 save_debug_info_for_decl (tree t)
5100 {
5101 /*struct saved_debug_info_d *sdi;*/
5102
5103 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5104
5105 /* FIXME. Partial implementation for saving debug info removed. */
5106 }
5107
5108
5109 /* Save all language fields needed to generate proper debug information
5110 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5111
5112 static void
5113 save_debug_info_for_type (tree t)
5114 {
5115 /*struct saved_debug_info_d *sdi;*/
5116
5117 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5118
5119 /* FIXME. Partial implementation for saving debug info removed. */
5120 }
5121
5122
5123 /* Add type or decl T to one of the list of tree nodes that need their
5124 language data removed. The lists are held inside FLD. */
5125
5126 static void
5127 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5128 {
5129 if (DECL_P (t))
5130 {
5131 fld->decls.safe_push (t);
5132 if (debug_info_level > DINFO_LEVEL_TERSE)
5133 save_debug_info_for_decl (t);
5134 }
5135 else if (TYPE_P (t))
5136 {
5137 fld->types.safe_push (t);
5138 if (debug_info_level > DINFO_LEVEL_TERSE)
5139 save_debug_info_for_type (t);
5140 }
5141 else
5142 gcc_unreachable ();
5143 }
5144
5145 /* Push tree node T into FLD->WORKLIST. */
5146
5147 static inline void
5148 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5149 {
5150 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5151 fld->worklist.safe_push ((t));
5152 }
5153
5154
5155 /* Operand callback helper for free_lang_data_in_node. *TP is the
5156 subtree operand being considered. */
5157
5158 static tree
5159 find_decls_types_r (tree *tp, int *ws, void *data)
5160 {
5161 tree t = *tp;
5162 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5163
5164 if (TREE_CODE (t) == TREE_LIST)
5165 return NULL_TREE;
5166
5167 /* Language specific nodes will be removed, so there is no need
5168 to gather anything under them. */
5169 if (is_lang_specific (t))
5170 {
5171 *ws = 0;
5172 return NULL_TREE;
5173 }
5174
5175 if (DECL_P (t))
5176 {
5177 /* Note that walk_tree does not traverse every possible field in
5178 decls, so we have to do our own traversals here. */
5179 add_tree_to_fld_list (t, fld);
5180
5181 fld_worklist_push (DECL_NAME (t), fld);
5182 fld_worklist_push (DECL_CONTEXT (t), fld);
5183 fld_worklist_push (DECL_SIZE (t), fld);
5184 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5185
5186 /* We are going to remove everything under DECL_INITIAL for
5187 TYPE_DECLs. No point walking them. */
5188 if (TREE_CODE (t) != TYPE_DECL)
5189 fld_worklist_push (DECL_INITIAL (t), fld);
5190
5191 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5192 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5193
5194 if (TREE_CODE (t) == FUNCTION_DECL)
5195 {
5196 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5197 fld_worklist_push (DECL_RESULT (t), fld);
5198 }
5199 else if (TREE_CODE (t) == TYPE_DECL)
5200 {
5201 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5202 fld_worklist_push (DECL_VINDEX (t), fld);
5203 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5204 }
5205 else if (TREE_CODE (t) == FIELD_DECL)
5206 {
5207 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5208 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5209 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5210 fld_worklist_push (DECL_FCONTEXT (t), fld);
5211 }
5212 else if (TREE_CODE (t) == VAR_DECL)
5213 {
5214 fld_worklist_push (DECL_SECTION_NAME (t), fld);
5215 fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
5216 }
5217
5218 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5219 && DECL_HAS_VALUE_EXPR_P (t))
5220 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5221
5222 if (TREE_CODE (t) != FIELD_DECL
5223 && TREE_CODE (t) != TYPE_DECL)
5224 fld_worklist_push (TREE_CHAIN (t), fld);
5225 *ws = 0;
5226 }
5227 else if (TYPE_P (t))
5228 {
5229 /* Note that walk_tree does not traverse every possible field in
5230 types, so we have to do our own traversals here. */
5231 add_tree_to_fld_list (t, fld);
5232
5233 if (!RECORD_OR_UNION_TYPE_P (t))
5234 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5235 fld_worklist_push (TYPE_SIZE (t), fld);
5236 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5237 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5238 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5239 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5240 fld_worklist_push (TYPE_NAME (t), fld);
5241 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5242 them and thus do not want to reach unused pointer types
5243 this way. */
5244 if (!POINTER_TYPE_P (t))
5245 fld_worklist_push (TYPE_MINVAL (t), fld);
5246 if (!RECORD_OR_UNION_TYPE_P (t))
5247 fld_worklist_push (TYPE_MAXVAL (t), fld);
5248 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5249 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5250 do not want to reach unused variants this way. */
5251 if (TYPE_CONTEXT (t))
5252 {
5253 tree ctx = TYPE_CONTEXT (t);
5254 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5255 So push that instead. */
5256 while (ctx && TREE_CODE (ctx) == BLOCK)
5257 ctx = BLOCK_SUPERCONTEXT (ctx);
5258 fld_worklist_push (ctx, fld);
5259 }
5260 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do
5261 not want to reach unused types this way. */
5262
5263 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5264 {
5265 unsigned i;
5266 tree tem;
5267 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5268 fld_worklist_push (TREE_TYPE (tem), fld);
5269 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5270 if (tem
5271 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5272 && TREE_CODE (tem) == TREE_LIST)
5273 do
5274 {
5275 fld_worklist_push (TREE_VALUE (tem), fld);
5276 tem = TREE_CHAIN (tem);
5277 }
5278 while (tem);
5279 }
5280 if (RECORD_OR_UNION_TYPE_P (t))
5281 {
5282 tree tem;
5283 /* Push all TYPE_FIELDS - interesting and non-interesting entries
5284 can be interleaved. */
5285 tem = TYPE_FIELDS (t);
5286 while (tem)
5287 {
5288 if (TREE_CODE (tem) == FIELD_DECL
5289 || TREE_CODE (tem) == TYPE_DECL)
5290 fld_worklist_push (tem, fld);
5291 tem = TREE_CHAIN (tem);
5292 }
5293 }
5294
5295 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5296 *ws = 0;
5297 }
5298 else if (TREE_CODE (t) == BLOCK)
5299 {
5300 tree tem;
5301 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5302 fld_worklist_push (tem, fld);
5303 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5304 fld_worklist_push (tem, fld);
5305 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5306 }
5307
5308 if (TREE_CODE (t) != IDENTIFIER_NODE
5309 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5310 fld_worklist_push (TREE_TYPE (t), fld);
5311
5312 return NULL_TREE;
5313 }
5314
5315
5316 /* Find decls and types in T. */
5317
5318 static void
5319 find_decls_types (tree t, struct free_lang_data_d *fld)
5320 {
5321 while (1)
5322 {
5323 if (!pointer_set_contains (fld->pset, t))
5324 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5325 if (fld->worklist.is_empty ())
5326 break;
5327 t = fld->worklist.pop ();
5328 }
5329 }
5330
5331 /* Translate all the types in LIST with the corresponding runtime
5332 types. */
5333
5334 static tree
5335 get_eh_types_for_runtime (tree list)
5336 {
5337 tree head, prev;
5338
5339 if (list == NULL_TREE)
5340 return NULL_TREE;
5341
5342 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5343 prev = head;
5344 list = TREE_CHAIN (list);
5345 while (list)
5346 {
5347 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5348 TREE_CHAIN (prev) = n;
5349 prev = TREE_CHAIN (prev);
5350 list = TREE_CHAIN (list);
5351 }
5352
5353 return head;
5354 }
5355
5356
5357 /* Find decls and types referenced in EH region R and store them in
5358 FLD->DECLS and FLD->TYPES. */
5359
5360 static void
5361 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5362 {
5363 switch (r->type)
5364 {
5365 case ERT_CLEANUP:
5366 break;
5367
5368 case ERT_TRY:
5369 {
5370 eh_catch c;
5371
5372 /* The types referenced in each catch must first be changed to the
5373 EH types used at runtime. This removes references to FE types
5374 in the region. */
5375 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5376 {
5377 c->type_list = get_eh_types_for_runtime (c->type_list);
5378 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5379 }
5380 }
5381 break;
5382
5383 case ERT_ALLOWED_EXCEPTIONS:
5384 r->u.allowed.type_list
5385 = get_eh_types_for_runtime (r->u.allowed.type_list);
5386 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5387 break;
5388
5389 case ERT_MUST_NOT_THROW:
5390 walk_tree (&r->u.must_not_throw.failure_decl,
5391 find_decls_types_r, fld, fld->pset);
5392 break;
5393 }
5394 }
5395
5396
5397 /* Find decls and types referenced in cgraph node N and store them in
5398 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5399 look for *every* kind of DECL and TYPE node reachable from N,
5400 including those embedded inside types and decls (i.e., TYPE_DECLs,
5401 NAMESPACE_DECLs, etc.). */
5402
5403 static void
5404 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5405 {
5406 basic_block bb;
5407 struct function *fn;
5408 unsigned ix;
5409 tree t;
5410
5411 find_decls_types (n->decl, fld);
5412
5413 if (!gimple_has_body_p (n->decl))
5414 return;
5415
5416 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5417
5418 fn = DECL_STRUCT_FUNCTION (n->decl);
5419
5420 /* Traverse locals. */
5421 FOR_EACH_LOCAL_DECL (fn, ix, t)
5422 find_decls_types (t, fld);
5423
5424 /* Traverse EH regions in FN. */
5425 {
5426 eh_region r;
5427 FOR_ALL_EH_REGION_FN (r, fn)
5428 find_decls_types_in_eh_region (r, fld);
5429 }
5430
5431 /* Traverse every statement in FN. */
5432 FOR_EACH_BB_FN (bb, fn)
5433 {
5434 gimple_stmt_iterator si;
5435 unsigned i;
5436
5437 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5438 {
5439 gimple phi = gsi_stmt (si);
5440
5441 for (i = 0; i < gimple_phi_num_args (phi); i++)
5442 {
5443 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5444 find_decls_types (*arg_p, fld);
5445 }
5446 }
5447
5448 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5449 {
5450 gimple stmt = gsi_stmt (si);
5451
5452 if (is_gimple_call (stmt))
5453 find_decls_types (gimple_call_fntype (stmt), fld);
5454
5455 for (i = 0; i < gimple_num_ops (stmt); i++)
5456 {
5457 tree arg = gimple_op (stmt, i);
5458 find_decls_types (arg, fld);
5459 }
5460 }
5461 }
5462 }
5463
5464
5465 /* Find decls and types referenced in varpool node N and store them in
5466 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5467 look for *every* kind of DECL and TYPE node reachable from N,
5468 including those embedded inside types and decls (i.e., TYPE_DECLs,
5469 NAMESPACE_DECLs, etc.). */
5470
5471 static void
5472 find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
5473 {
5474 find_decls_types (v->decl, fld);
5475 }
5476
5477 /* If T needs an assembler name, have one created for it. */
5478
5479 void
5480 assign_assembler_name_if_neeeded (tree t)
5481 {
5482 if (need_assembler_name_p (t))
5483 {
5484 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5485 diagnostics that use input_location to show locus
5486 information. The problem here is that, at this point,
5487 input_location is generally anchored to the end of the file
5488 (since the parser is long gone), so we don't have a good
5489 position to pin it to.
5490
5491 To alleviate this problem, this uses the location of T's
5492 declaration. Examples of this are
5493 testsuite/g++.dg/template/cond2.C and
5494 testsuite/g++.dg/template/pr35240.C. */
5495 location_t saved_location = input_location;
5496 input_location = DECL_SOURCE_LOCATION (t);
5497
5498 decl_assembler_name (t);
5499
5500 input_location = saved_location;
5501 }
5502 }
5503
5504
5505 /* Free language specific information for every operand and expression
5506 in every node of the call graph. This process operates in three stages:
5507
5508 1- Every callgraph node and varpool node is traversed looking for
5509 decls and types embedded in them. This is a more exhaustive
5510 search than that done by find_referenced_vars, because it will
5511 also collect individual fields, decls embedded in types, etc.
5512
5513 2- All the decls found are sent to free_lang_data_in_decl.
5514
5515 3- All the types found are sent to free_lang_data_in_type.
5516
5517 The ordering between decls and types is important because
5518 free_lang_data_in_decl sets assembler names, which includes
5519 mangling. So types cannot be freed up until assembler names have
5520 been set up. */
5521
5522 static void
5523 free_lang_data_in_cgraph (void)
5524 {
5525 struct cgraph_node *n;
5526 struct varpool_node *v;
5527 struct free_lang_data_d fld;
5528 tree t;
5529 unsigned i;
5530 alias_pair *p;
5531
5532 /* Initialize sets and arrays to store referenced decls and types. */
5533 fld.pset = pointer_set_create ();
5534 fld.worklist.create (0);
5535 fld.decls.create (100);
5536 fld.types.create (100);
5537
5538 /* Find decls and types in the body of every function in the callgraph. */
5539 FOR_EACH_FUNCTION (n)
5540 find_decls_types_in_node (n, &fld);
5541
5542 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5543 find_decls_types (p->decl, &fld);
5544
5545 /* Find decls and types in every varpool symbol. */
5546 FOR_EACH_VARIABLE (v)
5547 find_decls_types_in_var (v, &fld);
5548
5549 /* Set the assembler name on every decl found. We need to do this
5550 now because free_lang_data_in_decl will invalidate data needed
5551 for mangling. This breaks mangling on interdependent decls. */
5552 FOR_EACH_VEC_ELT (fld.decls, i, t)
5553 assign_assembler_name_if_neeeded (t);
5554
5555 /* Traverse every decl found freeing its language data. */
5556 FOR_EACH_VEC_ELT (fld.decls, i, t)
5557 free_lang_data_in_decl (t);
5558
5559 /* Traverse every type found freeing its language data. */
5560 FOR_EACH_VEC_ELT (fld.types, i, t)
5561 free_lang_data_in_type (t);
5562
5563 pointer_set_destroy (fld.pset);
5564 fld.worklist.release ();
5565 fld.decls.release ();
5566 fld.types.release ();
5567 }
5568
5569
5570 /* Free resources that are used by the front end but are not needed once it is done. */
5571
5572 static unsigned
5573 free_lang_data (void)
5574 {
5575 unsigned i;
5576
5577 /* If we are the LTO frontend we have freed lang-specific data already. */
5578 if (in_lto_p
5579 || !flag_generate_lto)
5580 return 0;
5581
5582 /* Allocate and assign alias sets to the standard integer types
5583 while the slots are still in the way the frontends generated them. */
5584 for (i = 0; i < itk_none; ++i)
5585 if (integer_types[i])
5586 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5587
5588 /* Traverse the IL resetting language specific information for
5589 operands, expressions, etc. */
5590 free_lang_data_in_cgraph ();
5591
5592 /* Create gimple variants for common types. */
5593 ptrdiff_type_node = integer_type_node;
5594 fileptr_type_node = ptr_type_node;
5595
5596 /* Reset some langhooks. Do not reset types_compatible_p, it may
5597 still be used indirectly via the get_alias_set langhook. */
5598 lang_hooks.dwarf_name = lhd_dwarf_name;
5599 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5600 /* We do not want the default decl_assembler_name implementation,
5601 rather if we have fixed everything we want a wrapper around it
5602 asserting that all non-local symbols already got their assembler
5603 name and only produce assembler names for local symbols. Or rather
5604 make sure we never call decl_assembler_name on local symbols and
5605 devise a separate, middle-end private scheme for it. */
5606
5607 /* Reset diagnostic machinery. */
5608 tree_diagnostics_defaults (global_dc);
5609
5610 return 0;
5611 }
5612
5613
5614 namespace {
5615
5616 const pass_data pass_data_ipa_free_lang_data =
5617 {
5618 SIMPLE_IPA_PASS, /* type */
5619 "*free_lang_data", /* name */
5620 OPTGROUP_NONE, /* optinfo_flags */
5621 false, /* has_gate */
5622 true, /* has_execute */
5623 TV_IPA_FREE_LANG_DATA, /* tv_id */
5624 0, /* properties_required */
5625 0, /* properties_provided */
5626 0, /* properties_destroyed */
5627 0, /* todo_flags_start */
5628 0, /* todo_flags_finish */
5629 };
5630
5631 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5632 {
5633 public:
5634 pass_ipa_free_lang_data (gcc::context *ctxt)
5635 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5636 {}
5637
5638 /* opt_pass methods: */
5639 unsigned int execute () { return free_lang_data (); }
5640
5641 }; // class pass_ipa_free_lang_data
5642
5643 } // anon namespace
5644
5645 simple_ipa_opt_pass *
5646 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5647 {
5648 return new pass_ipa_free_lang_data (ctxt);
5649 }
5650
5651 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5652 ATTR_NAME. Also used internally by remove_attribute(). */
5653 bool
5654 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5655 {
5656 size_t ident_len = IDENTIFIER_LENGTH (ident);
5657
5658 if (ident_len == attr_len)
5659 {
5660 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5661 return true;
5662 }
5663 else if (ident_len == attr_len + 4)
5664 {
5665 /* There is the possibility that ATTR is 'text' and IDENT is
5666 '__text__'. */
5667 const char *p = IDENTIFIER_POINTER (ident);
5668 if (p[0] == '_' && p[1] == '_'
5669 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5670 && strncmp (attr_name, p + 2, attr_len) == 0)
5671 return true;
5672 }
5673
5674 return false;
5675 }
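/* Illustrative sketch, not part of the original file: this is what
   is_attribute_p reduces to.  Both the plain and the underscored
   spelling of an attribute name match:

	private_is_attribute_p ("packed", 6, get_identifier ("packed"))
	  => true
	private_is_attribute_p ("packed", 6, get_identifier ("__packed__"))
	  => true
	private_is_attribute_p ("packed", 6, get_identifier ("aligned"))
	  => false  */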
5676
5677 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5678 of ATTR_NAME, and LIST is not NULL_TREE. */
5679 tree
5680 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5681 {
5682 while (list)
5683 {
5684 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5685
5686 if (ident_len == attr_len)
5687 {
5688 if (!strcmp (attr_name,
5689 IDENTIFIER_POINTER (get_attribute_name (list))))
5690 break;
5691 }
5692 /* TODO: If we made sure that attributes were stored in the
5693 canonical form without '__...__' (i.e., as in 'text' as opposed
5694 to '__text__') then we could avoid the following case. */
5695 else if (ident_len == attr_len + 4)
5696 {
5697 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5698 if (p[0] == '_' && p[1] == '_'
5699 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5700 && strncmp (attr_name, p + 2, attr_len) == 0)
5701 break;
5702 }
5703 list = TREE_CHAIN (list);
5704 }
5705
5706 return list;
5707 }
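/* Usage sketch (illustrative only): callers go through the
   lookup_attribute wrapper in tree.h, which handles an empty list and
   then calls this function.  Assuming FNDECL is a FUNCTION_DECL:

	if (lookup_attribute ("noreturn", DECL_ATTRIBUTES (fndecl)))
	  ...

   The return value is the matching TREE_LIST element, so TREE_VALUE of
   it gives the attribute arguments and its TREE_CHAIN can be passed
   back in to find further occurrences.  */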
5708
5709 /* A variant of lookup_attribute() that can be used with an identifier
5710 as the first argument, and where the identifier can be either
5711 'text' or '__text__'.
5712
5713 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5714 return a pointer to the attribute's list element if the attribute
5715 is part of the list, or NULL_TREE if not found. If the attribute
5716 appears more than once, this only returns the first occurrence; the
5717 TREE_CHAIN of the return value should be passed back in if further
5718 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5719 can be in the form 'text' or '__text__'. */
5720 static tree
5721 lookup_ident_attribute (tree attr_identifier, tree list)
5722 {
5723 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5724
5725 while (list)
5726 {
5727 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5728 == IDENTIFIER_NODE);
5729
5730 /* Identifiers can be compared directly for equality. */
5731 if (attr_identifier == get_attribute_name (list))
5732 break;
5733
5734 /* If they are not equal, they may still be one in the form
5735 'text' while the other one is in the form '__text__'. TODO:
5736 If we were storing attributes in normalized 'text' form, then
5737 this could all go away and we could take full advantage of
5738 the fact that we're comparing identifiers. :-) */
5739 {
5740 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5741 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5742
5743 if (ident_len == attr_len + 4)
5744 {
5745 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5746 const char *q = IDENTIFIER_POINTER (attr_identifier);
5747 if (p[0] == '_' && p[1] == '_'
5748 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5749 && strncmp (q, p + 2, attr_len) == 0)
5750 break;
5751 }
5752 else if (ident_len + 4 == attr_len)
5753 {
5754 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5755 const char *q = IDENTIFIER_POINTER (attr_identifier);
5756 if (q[0] == '_' && q[1] == '_'
5757 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5758 && strncmp (q + 2, p, ident_len) == 0)
5759 break;
5760 }
5761 }
5762 list = TREE_CHAIN (list);
5763 }
5764
5765 return list;
5766 }
5767
5768 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5769 modified list. */
5770
5771 tree
5772 remove_attribute (const char *attr_name, tree list)
5773 {
5774 tree *p;
5775 size_t attr_len = strlen (attr_name);
5776
5777 gcc_checking_assert (attr_name[0] != '_');
5778
5779 for (p = &list; *p; )
5780 {
5781 tree l = *p;
5782 /* TODO: If we were storing attributes in normalized form, here
5783 we could use a simple strcmp(). */
5784 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5785 *p = TREE_CHAIN (l);
5786 else
5787 p = &TREE_CHAIN (l);
5788 }
5789
5790 return list;
5791 }
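/* Usage sketch (illustrative only): ATTR_NAME is given without
   underscores (the assertion above enforces this); both spellings in
   the list are removed:

	DECL_ATTRIBUTES (decl)
	  = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));  */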
5792
5793 /* Return an attribute list that is the union of a1 and a2. */
5794
5795 tree
5796 merge_attributes (tree a1, tree a2)
5797 {
5798 tree attributes;
5799
5800 /* Either one unset? Take the set one. */
5801
5802 if ((attributes = a1) == 0)
5803 attributes = a2;
5804
5805 /* One that completely contains the other? Take it. */
5806
5807 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5808 {
5809 if (attribute_list_contained (a2, a1))
5810 attributes = a2;
5811 else
5812 {
5813 /* Pick the longest list, and hang on the other list. */
5814
5815 if (list_length (a1) < list_length (a2))
5816 attributes = a2, a2 = a1;
5817
5818 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5819 {
5820 tree a;
5821 for (a = lookup_ident_attribute (get_attribute_name (a2),
5822 attributes);
5823 a != NULL_TREE && !attribute_value_equal (a, a2);
5824 a = lookup_ident_attribute (get_attribute_name (a2),
5825 TREE_CHAIN (a)))
5826 ;
5827 if (a == NULL_TREE)
5828 {
5829 a1 = copy_node (a2);
5830 TREE_CHAIN (a1) = attributes;
5831 attributes = a1;
5832 }
5833 }
5834 }
5835 }
5836 return attributes;
5837 }
5838
5839 /* Given types T1 and T2, merge their attributes and return
5840 the result. */
5841
5842 tree
5843 merge_type_attributes (tree t1, tree t2)
5844 {
5845 return merge_attributes (TYPE_ATTRIBUTES (t1),
5846 TYPE_ATTRIBUTES (t2));
5847 }
5848
5849 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5850 the result. */
5851
5852 tree
5853 merge_decl_attributes (tree olddecl, tree newdecl)
5854 {
5855 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5856 DECL_ATTRIBUTES (newdecl));
5857 }
5858
5859 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5860
5861 /* Specialization of merge_decl_attributes for various Windows targets.
5862
5863 This handles the following situation:
5864
5865 __declspec (dllimport) int foo;
5866 int foo;
5867
5868 The second instance of `foo' nullifies the dllimport. */
5869
5870 tree
5871 merge_dllimport_decl_attributes (tree old, tree new_tree)
5872 {
5873 tree a;
5874 int delete_dllimport_p = 1;
5875
5876 /* What we need to do here is remove from `old' dllimport if it doesn't
5877 appear in `new'. dllimport behaves like extern: if a declaration is
5878 marked dllimport and a definition appears later, then the object
5879 is not dllimport'd. We also remove a `new' dllimport if the old list
5880 contains dllexport: dllexport always overrides dllimport, regardless
5881 of the order of declaration. */
5882 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5883 delete_dllimport_p = 0;
5884 else if (DECL_DLLIMPORT_P (new_tree)
5885 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5886 {
5887 DECL_DLLIMPORT_P (new_tree) = 0;
5888 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5889 "dllimport ignored", new_tree);
5890 }
5891 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5892 {
5893 /* Warn about overriding a symbol that has already been used, e.g.:
5894 extern int __attribute__ ((dllimport)) foo;
5895 int* bar () {return &foo;}
5896 int foo;
5897 */
5898 if (TREE_USED (old))
5899 {
5900 warning (0, "%q+D redeclared without dllimport attribute "
5901 "after being referenced with dll linkage", new_tree);
5902 /* If we have used a variable's address with dllimport linkage,
5903 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5904 decl may already have had TREE_CONSTANT computed.
5905 We still remove the attribute so that assembler code refers
5906 to '&foo' rather than '_imp__foo'. */
5907 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5908 DECL_DLLIMPORT_P (new_tree) = 1;
5909 }
5910
5911 /* Let an inline definition silently override the external reference,
5912 but otherwise warn about attribute inconsistency. */
5913 else if (TREE_CODE (new_tree) == VAR_DECL
5914 || !DECL_DECLARED_INLINE_P (new_tree))
5915 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
5916 "previous dllimport ignored", new_tree);
5917 }
5918 else
5919 delete_dllimport_p = 0;
5920
5921 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
5922
5923 if (delete_dllimport_p)
5924 a = remove_attribute ("dllimport", a);
5925
5926 return a;
5927 }
5928
5929 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
5930 struct attribute_spec.handler. */
5931
5932 tree
5933 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
5934 bool *no_add_attrs)
5935 {
5936 tree node = *pnode;
5937 bool is_dllimport;
5938
5939 /* These attributes may apply to structure and union types being created,
5940 but otherwise should pass to the declaration involved. */
5941 if (!DECL_P (node))
5942 {
5943 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
5944 | (int) ATTR_FLAG_ARRAY_NEXT))
5945 {
5946 *no_add_attrs = true;
5947 return tree_cons (name, args, NULL_TREE);
5948 }
5949 if (TREE_CODE (node) == RECORD_TYPE
5950 || TREE_CODE (node) == UNION_TYPE)
5951 {
5952 node = TYPE_NAME (node);
5953 if (!node)
5954 return NULL_TREE;
5955 }
5956 else
5957 {
5958 warning (OPT_Wattributes, "%qE attribute ignored",
5959 name);
5960 *no_add_attrs = true;
5961 return NULL_TREE;
5962 }
5963 }
5964
5965 if (TREE_CODE (node) != FUNCTION_DECL
5966 && TREE_CODE (node) != VAR_DECL
5967 && TREE_CODE (node) != TYPE_DECL)
5968 {
5969 *no_add_attrs = true;
5970 warning (OPT_Wattributes, "%qE attribute ignored",
5971 name);
5972 return NULL_TREE;
5973 }
5974
5975 if (TREE_CODE (node) == TYPE_DECL
5976 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
5977 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
5978 {
5979 *no_add_attrs = true;
5980 warning (OPT_Wattributes, "%qE attribute ignored",
5981 name);
5982 return NULL_TREE;
5983 }
5984
5985 is_dllimport = is_attribute_p ("dllimport", name);
5986
5987 /* Report error on dllimport ambiguities seen now before they cause
5988 any damage. */
5989 if (is_dllimport)
5990 {
5991 /* Honor any target-specific overrides. */
5992 if (!targetm.valid_dllimport_attribute_p (node))
5993 *no_add_attrs = true;
5994
5995 else if (TREE_CODE (node) == FUNCTION_DECL
5996 && DECL_DECLARED_INLINE_P (node))
5997 {
5998 warning (OPT_Wattributes, "inline function %q+D declared as "
5999 " dllimport: attribute ignored", node);
6000 *no_add_attrs = true;
6001 }
6002 /* Like MS, treat definition of dllimported variables and
6003 non-inlined functions on declaration as syntax errors. */
6004 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6005 {
6006 error ("function %q+D definition is marked dllimport", node);
6007 *no_add_attrs = true;
6008 }
6009
6010 else if (TREE_CODE (node) == VAR_DECL)
6011 {
6012 if (DECL_INITIAL (node))
6013 {
6014 error ("variable %q+D definition is marked dllimport",
6015 node);
6016 *no_add_attrs = true;
6017 }
6018
6019 /* `extern' needn't be specified with dllimport.
6020 Specify `extern' now and hope for the best. Sigh. */
6021 DECL_EXTERNAL (node) = 1;
6022 /* Also, implicitly give global scope to dllimport'd variables
6023    declared within a function, unless they are declared static. */
6024 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6025 TREE_PUBLIC (node) = 1;
6026 }
6027
6028 if (*no_add_attrs == false)
6029 DECL_DLLIMPORT_P (node) = 1;
6030 }
6031 else if (TREE_CODE (node) == FUNCTION_DECL
6032 && DECL_DECLARED_INLINE_P (node)
6033 && flag_keep_inline_dllexport)
6034 /* An exported function, even if inline, must be emitted. */
6035 DECL_EXTERNAL (node) = 0;
6036
6037 /* Report error if symbol is not accessible at global scope. */
6038 if (!TREE_PUBLIC (node)
6039 && (TREE_CODE (node) == VAR_DECL
6040 || TREE_CODE (node) == FUNCTION_DECL))
6041 {
6042 error ("external linkage required for symbol %q+D because of "
6043 "%qE attribute", node, name);
6044 *no_add_attrs = true;
6045 }
6046
6047 /* A dllexport'd entity must have default visibility so that other
6048 program units (shared libraries or the main executable) can see
6049 it. A dllimport'd entity must have default visibility so that
6050 the linker knows that undefined references within this program
6051 unit can be resolved by the dynamic linker. */
6052 if (!*no_add_attrs)
6053 {
6054 if (DECL_VISIBILITY_SPECIFIED (node)
6055 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6056 error ("%qE implies default visibility, but %qD has already "
6057 "been declared with a different visibility",
6058 name, node);
6059 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6060 DECL_VISIBILITY_SPECIFIED (node) = 1;
6061 }
6062
6063 return NULL_TREE;
6064 }
6065
6066 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6067 \f
6068 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6069 of the various TYPE_QUAL values. */
6070
6071 static void
6072 set_type_quals (tree type, int type_quals)
6073 {
6074 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6075 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6076 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6077 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6078 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6079 }
6080
6081 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6082
6083 bool
6084 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6085 {
6086 return (TYPE_QUALS (cand) == type_quals
6087 && TYPE_NAME (cand) == TYPE_NAME (base)
6088 /* Apparently this is needed for Objective-C. */
6089 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6090 /* Check alignment. */
6091 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6092 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6093 TYPE_ATTRIBUTES (base)));
6094 }
6095
6096 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6097
6098 static bool
6099 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6100 {
6101 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6102 && TYPE_NAME (cand) == TYPE_NAME (base)
6103 /* Apparently this is needed for Objective-C. */
6104 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6105 /* Check alignment. */
6106 && TYPE_ALIGN (cand) == align
6107 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6108 TYPE_ATTRIBUTES (base)));
6109 }
6110
6111 /* This function checks to see if TYPE matches the size of one of the
6112    built-in atomic types, and returns that core atomic type. */
6113
6114 static tree
6115 find_atomic_core_type (tree type)
6116 {
6117 tree base_atomic_type;
6118
6119 /* Only handle complete types. */
6120 if (TYPE_SIZE (type) == NULL_TREE)
6121 return NULL_TREE;
6122
6123 HOST_WIDE_INT type_size = tree_low_cst (TYPE_SIZE (type), 1);
6124 switch (type_size)
6125 {
6126 case 8:
6127 base_atomic_type = atomicQI_type_node;
6128 break;
6129
6130 case 16:
6131 base_atomic_type = atomicHI_type_node;
6132 break;
6133
6134 case 32:
6135 base_atomic_type = atomicSI_type_node;
6136 break;
6137
6138 case 64:
6139 base_atomic_type = atomicDI_type_node;
6140 break;
6141
6142 case 128:
6143 base_atomic_type = atomicTI_type_node;
6144 break;
6145
6146 default:
6147 base_atomic_type = NULL_TREE;
6148 }
6149
6150 return base_atomic_type;
6151 }
6152
6153 /* Return a version of the TYPE, qualified as indicated by the
6154 TYPE_QUALS, if one exists. If no qualified version exists yet,
6155 return NULL_TREE. */
6156
6157 tree
6158 get_qualified_type (tree type, int type_quals)
6159 {
6160 tree t;
6161
6162 if (TYPE_QUALS (type) == type_quals)
6163 return type;
6164
6165 /* Search the chain of variants to see if there is already one there just
6166 like the one we need to have. If so, use that existing one. We must
6167 preserve the TYPE_NAME, since there is code that depends on this. */
6168 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6169 if (check_qualified_type (t, type, type_quals))
6170 return t;
6171
6172 return NULL_TREE;
6173 }
6174
6175 /* Like get_qualified_type, but creates the type if it does not
6176 exist. This function never returns NULL_TREE. */
6177
6178 tree
6179 build_qualified_type (tree type, int type_quals)
6180 {
6181 tree t;
6182
6183 /* See if we already have the appropriate qualified variant. */
6184 t = get_qualified_type (type, type_quals);
6185
6186 /* If not, build it. */
6187 if (!t)
6188 {
6189 t = build_variant_type_copy (type);
6190 set_type_quals (t, type_quals);
6191
6192 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6193 {
6194 /* See if this object can map to a basic atomic type. */
6195 tree atomic_type = find_atomic_core_type (type);
6196 if (atomic_type)
6197 {
6198 /* Ensure the alignment of this type is compatible with
6199 the required alignment of the atomic type. */
6200 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6201 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6202 }
6203 }
6204
6205 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6206 /* Propagate structural equality. */
6207 SET_TYPE_STRUCTURAL_EQUALITY (t);
6208 else if (TYPE_CANONICAL (type) != type)
6209 /* Build the underlying canonical type, since it is different
6210 from TYPE. */
6211 TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
6212 type_quals);
6213 else
6214 /* T is its own canonical type. */
6215 TYPE_CANONICAL (t) = t;
6216
6217 }
6218
6219 return t;
6220 }
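
/* For example, a "const volatile int" variant can be obtained (creating it
   on first use) with

     tree cv_int = build_qualified_type (integer_type_node,
                                         TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   Asking again with the same qualifiers returns the same node, since
   get_qualified_type finds it on the main variant's chain.  */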
6221
6222 /* Create a variant of type T with alignment ALIGN. */
6223
6224 tree
6225 build_aligned_type (tree type, unsigned int align)
6226 {
6227 tree t;
6228
6229 if (TYPE_PACKED (type)
6230 || TYPE_ALIGN (type) == align)
6231 return type;
6232
6233 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6234 if (check_aligned_type (t, type, align))
6235 return t;
6236
6237 t = build_variant_type_copy (type);
6238 TYPE_ALIGN (t) = align;
6239
6240 return t;
6241 }
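
/* As an illustration, a variant of "char" aligned to 4 bytes could be
   built with

     tree c4 = build_aligned_type (char_type_node, 32);

   the ALIGN argument being expressed in bits, like TYPE_ALIGN.  */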
6242
6243 /* Create a new distinct copy of TYPE. The new type is made its own
6244 MAIN_VARIANT. If TYPE requires structural equality checks, the
6245 resulting type requires structural equality checks; otherwise, its
6246 TYPE_CANONICAL points to itself. */
6247
6248 tree
6249 build_distinct_type_copy (tree type)
6250 {
6251 tree t = copy_node (type);
6252
6253 TYPE_POINTER_TO (t) = 0;
6254 TYPE_REFERENCE_TO (t) = 0;
6255
6256 /* Set the canonical type either to a new equivalence class, or
6257 propagate the need for structural equality checks. */
6258 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6259 SET_TYPE_STRUCTURAL_EQUALITY (t);
6260 else
6261 TYPE_CANONICAL (t) = t;
6262
6263 /* Make it its own variant. */
6264 TYPE_MAIN_VARIANT (t) = t;
6265 TYPE_NEXT_VARIANT (t) = 0;
6266
6267 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6268 whose TREE_TYPE is not t. This can also happen in the Ada
6269 frontend when using subtypes. */
6270
6271 return t;
6272 }
6273
6274 /* Create a new variant of TYPE, equivalent but distinct. This is so
6275 the caller can modify it. TYPE_CANONICAL for the return type will
6276 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6277 are considered equal by the language itself (or that both types
6278 require structural equality checks). */
6279
6280 tree
6281 build_variant_type_copy (tree type)
6282 {
6283 tree t, m = TYPE_MAIN_VARIANT (type);
6284
6285 t = build_distinct_type_copy (type);
6286
6287 /* Since we're building a variant, assume that it is a non-semantic
6288 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6289 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6290
6291 /* Add the new type to the chain of variants of TYPE. */
6292 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6293 TYPE_NEXT_VARIANT (m) = t;
6294 TYPE_MAIN_VARIANT (t) = m;
6295
6296 return t;
6297 }
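
/* To illustrate the difference between the two copy routines:

     tree v = build_variant_type_copy (type);
     tree d = build_distinct_type_copy (type);

   V stays on TYPE's variant chain and shares its TYPE_CANONICAL, so the
   middle end still considers it the same type; D starts a fresh main
   variant with its own canonical type (or structural equality), so it is
   treated as a genuinely different type.  */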
6298 \f
6299 /* Return true if the from trees in both tree maps are equal. */
6300
6301 int
6302 tree_map_base_eq (const void *va, const void *vb)
6303 {
6304 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6305 *const b = (const struct tree_map_base *) vb;
6306 return (a->from == b->from);
6307 }
6308
6309 /* Hash a from tree in a tree_map_base. */
6310
6311 unsigned int
6312 tree_map_base_hash (const void *item)
6313 {
6314 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6315 }
6316
6317 /* Return true if this tree map structure is marked for garbage collection
6318 purposes. We simply return true if the from tree is marked, so that this
6319 structure goes away when the from tree goes away. */
6320
6321 int
6322 tree_map_base_marked_p (const void *p)
6323 {
6324 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6325 }
6326
6327 /* Hash a from tree in a tree_map. */
6328
6329 unsigned int
6330 tree_map_hash (const void *item)
6331 {
6332 return (((const struct tree_map *) item)->hash);
6333 }
6334
6335 /* Hash a from tree in a tree_decl_map. */
6336
6337 unsigned int
6338 tree_decl_map_hash (const void *item)
6339 {
6340 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6341 }
6342
6343 /* Return the initialization priority for DECL. */
6344
6345 priority_type
6346 decl_init_priority_lookup (tree decl)
6347 {
6348 struct tree_priority_map *h;
6349 struct tree_map_base in;
6350
6351 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6352 in.from = decl;
6353 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6354 return h ? h->init : DEFAULT_INIT_PRIORITY;
6355 }
6356
6357 /* Return the finalization priority for DECL. */
6358
6359 priority_type
6360 decl_fini_priority_lookup (tree decl)
6361 {
6362 struct tree_priority_map *h;
6363 struct tree_map_base in;
6364
6365 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6366 in.from = decl;
6367 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6368 return h ? h->fini : DEFAULT_INIT_PRIORITY;
6369 }
6370
6371 /* Return the initialization and finalization priority information for
6372 DECL. If there is no previous priority information, a freshly
6373 allocated structure is returned. */
6374
6375 static struct tree_priority_map *
6376 decl_priority_info (tree decl)
6377 {
6378 struct tree_priority_map in;
6379 struct tree_priority_map *h;
6380 void **loc;
6381
6382 in.base.from = decl;
6383 loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
6384 h = (struct tree_priority_map *) *loc;
6385 if (!h)
6386 {
6387 h = ggc_alloc_cleared_tree_priority_map ();
6388 *loc = h;
6389 h->base.from = decl;
6390 h->init = DEFAULT_INIT_PRIORITY;
6391 h->fini = DEFAULT_INIT_PRIORITY;
6392 }
6393
6394 return h;
6395 }
6396
6397 /* Set the initialization priority for DECL to PRIORITY. */
6398
6399 void
6400 decl_init_priority_insert (tree decl, priority_type priority)
6401 {
6402 struct tree_priority_map *h;
6403
6404 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6405 if (priority == DEFAULT_INIT_PRIORITY)
6406 return;
6407 h = decl_priority_info (decl);
6408 h->init = priority;
6409 }
6410
6411 /* Set the finalization priority for DECL to PRIORITY. */
6412
6413 void
6414 decl_fini_priority_insert (tree decl, priority_type priority)
6415 {
6416 struct tree_priority_map *h;
6417
6418 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6419 if (priority == DEFAULT_INIT_PRIORITY)
6420 return;
6421 h = decl_priority_info (decl);
6422 h->fini = priority;
6423 }
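
/* As a sketch of the intended use, a front end handling something like
   __attribute__ ((constructor (200))) on FNDECL might record the priority
   with

     decl_init_priority_insert (fndecl, 200);

   and later queries through decl_init_priority_lookup fall back to
   DEFAULT_INIT_PRIORITY when nothing was recorded.  */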
6424
6425 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6426
6427 static void
6428 print_debug_expr_statistics (void)
6429 {
6430 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6431 (long) htab_size (debug_expr_for_decl),
6432 (long) htab_elements (debug_expr_for_decl),
6433 htab_collisions (debug_expr_for_decl));
6434 }
6435
6436 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6437
6438 static void
6439 print_value_expr_statistics (void)
6440 {
6441 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6442 (long) htab_size (value_expr_for_decl),
6443 (long) htab_elements (value_expr_for_decl),
6444 htab_collisions (value_expr_for_decl));
6445 }
6446
6447 /* Lookup a debug expression for FROM, and return it if we find one. */
6448
6449 tree
6450 decl_debug_expr_lookup (tree from)
6451 {
6452 struct tree_decl_map *h, in;
6453 in.base.from = from;
6454
6455 h = (struct tree_decl_map *)
6456 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6457 if (h)
6458 return h->to;
6459 return NULL_TREE;
6460 }
6461
6462 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6463
6464 void
6465 decl_debug_expr_insert (tree from, tree to)
6466 {
6467 struct tree_decl_map *h;
6468 void **loc;
6469
6470 h = ggc_alloc_tree_decl_map ();
6471 h->base.from = from;
6472 h->to = to;
6473 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6474 INSERT);
6475 *(struct tree_decl_map **) loc = h;
6476 }
6477
6478 /* Lookup a value expression for FROM, and return it if we find one. */
6479
6480 tree
6481 decl_value_expr_lookup (tree from)
6482 {
6483 struct tree_decl_map *h, in;
6484 in.base.from = from;
6485
6486 h = (struct tree_decl_map *)
6487 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6488 if (h)
6489 return h->to;
6490 return NULL_TREE;
6491 }
6492
6493 /* Insert a mapping FROM->TO in the value expression hashtable. */
6494
6495 void
6496 decl_value_expr_insert (tree from, tree to)
6497 {
6498 struct tree_decl_map *h;
6499 void **loc;
6500
6501 h = ggc_alloc_tree_decl_map ();
6502 h->base.from = from;
6503 h->to = to;
6504 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6505 INSERT);
6506 *(struct tree_decl_map **) loc = h;
6507 }
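
/* A sketch of how these maps are used: when a variable VAR is rewritten,
   for instance into a field of a nested-function frame structure, the
   original decl keeps a mapping to the replacement expression REPL:

     decl_value_expr_insert (var, repl);
     DECL_HAS_VALUE_EXPR_P (var) = 1;

   Consumers that see the flag then call decl_value_expr_lookup (var) to
   obtain the expression the decl now stands for.  */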
6508
6509 /* Lookup a vector of debug arguments for FROM, and return it if we
6510 find one. */
6511
6512 vec<tree, va_gc> **
6513 decl_debug_args_lookup (tree from)
6514 {
6515 struct tree_vec_map *h, in;
6516
6517 if (!DECL_HAS_DEBUG_ARGS_P (from))
6518 return NULL;
6519 gcc_checking_assert (debug_args_for_decl != NULL);
6520 in.base.from = from;
6521 h = (struct tree_vec_map *)
6522 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6523 if (h)
6524 return &h->to;
6525 return NULL;
6526 }
6527
6528 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6529    arguments hashtable. */
6530
6531 vec<tree, va_gc> **
6532 decl_debug_args_insert (tree from)
6533 {
6534 struct tree_vec_map *h;
6535 void **loc;
6536
6537 if (DECL_HAS_DEBUG_ARGS_P (from))
6538 return decl_debug_args_lookup (from);
6539 if (debug_args_for_decl == NULL)
6540 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6541 tree_vec_map_eq, 0);
6542 h = ggc_alloc_tree_vec_map ();
6543 h->base.from = from;
6544 h->to = NULL;
6545 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6546 INSERT);
6547 *(struct tree_vec_map **) loc = h;
6548 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6549 return &h->to;
6550 }
6551
6552 /* Hashing of types so that we don't make duplicates.
6553 The entry point is `type_hash_canon'. */
6554
6555 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6556 with types in the TREE_VALUE slots), by adding the hash codes
6557 of the individual types. */
6558
6559 static unsigned int
6560 type_hash_list (const_tree list, hashval_t hashcode)
6561 {
6562 const_tree tail;
6563
6564 for (tail = list; tail; tail = TREE_CHAIN (tail))
6565 if (TREE_VALUE (tail) != error_mark_node)
6566 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6567 hashcode);
6568
6569 return hashcode;
6570 }
6571
6572 /* These are the Hashtable callback functions. */
6573
6574 /* Returns true iff the types are equivalent. */
6575
6576 static int
6577 type_hash_eq (const void *va, const void *vb)
6578 {
6579 const struct type_hash *const a = (const struct type_hash *) va,
6580 *const b = (const struct type_hash *) vb;
6581
6582 /* First test the things that are the same for all types. */
6583 if (a->hash != b->hash
6584 || TREE_CODE (a->type) != TREE_CODE (b->type)
6585 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6586 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6587 TYPE_ATTRIBUTES (b->type))
6588 || (TREE_CODE (a->type) != COMPLEX_TYPE
6589 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6590 return 0;
6591
6592 /* Be careful about comparing arrays before and after the element type
6593 has been completed; don't compare TYPE_ALIGN unless both types are
6594 complete. */
6595 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6596 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6597 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6598 return 0;
6599
6600 switch (TREE_CODE (a->type))
6601 {
6602 case VOID_TYPE:
6603 case COMPLEX_TYPE:
6604 case POINTER_TYPE:
6605 case REFERENCE_TYPE:
6606 case NULLPTR_TYPE:
6607 return 1;
6608
6609 case VECTOR_TYPE:
6610 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6611
6612 case ENUMERAL_TYPE:
6613 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6614 && !(TYPE_VALUES (a->type)
6615 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6616 && TYPE_VALUES (b->type)
6617 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6618 && type_list_equal (TYPE_VALUES (a->type),
6619 TYPE_VALUES (b->type))))
6620 return 0;
6621
6622 /* ... fall through ... */
6623
6624 case INTEGER_TYPE:
6625 case REAL_TYPE:
6626 case BOOLEAN_TYPE:
6627 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6628 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6629 TYPE_MAX_VALUE (b->type)))
6630 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6631 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6632 TYPE_MIN_VALUE (b->type))));
6633
6634 case FIXED_POINT_TYPE:
6635 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6636
6637 case OFFSET_TYPE:
6638 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6639
6640 case METHOD_TYPE:
6641 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6642 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6643 || (TYPE_ARG_TYPES (a->type)
6644 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6645 && TYPE_ARG_TYPES (b->type)
6646 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6647 && type_list_equal (TYPE_ARG_TYPES (a->type),
6648 TYPE_ARG_TYPES (b->type)))))
6649 break;
6650 return 0;
6651 case ARRAY_TYPE:
6652 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6653
6654 case RECORD_TYPE:
6655 case UNION_TYPE:
6656 case QUAL_UNION_TYPE:
6657 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6658 || (TYPE_FIELDS (a->type)
6659 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6660 && TYPE_FIELDS (b->type)
6661 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6662 && type_list_equal (TYPE_FIELDS (a->type),
6663 TYPE_FIELDS (b->type))));
6664
6665 case FUNCTION_TYPE:
6666 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6667 || (TYPE_ARG_TYPES (a->type)
6668 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6669 && TYPE_ARG_TYPES (b->type)
6670 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6671 && type_list_equal (TYPE_ARG_TYPES (a->type),
6672 TYPE_ARG_TYPES (b->type))))
6673 break;
6674 return 0;
6675
6676 default:
6677 return 0;
6678 }
6679
6680 if (lang_hooks.types.type_hash_eq != NULL)
6681 return lang_hooks.types.type_hash_eq (a->type, b->type);
6682
6683 return 1;
6684 }
6685
6686 /* Return the cached hash value. */
6687
6688 static hashval_t
6689 type_hash_hash (const void *item)
6690 {
6691 return ((const struct type_hash *) item)->hash;
6692 }
6693
6694 /* Look in the type hash table for a type isomorphic to TYPE.
6695 If one is found, return it. Otherwise return 0. */
6696
6697 static tree
6698 type_hash_lookup (hashval_t hashcode, tree type)
6699 {
6700 struct type_hash *h, in;
6701
6702 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6703 must call that routine before comparing TYPE_ALIGNs. */
6704 layout_type (type);
6705
6706 in.hash = hashcode;
6707 in.type = type;
6708
6709 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6710 hashcode);
6711 if (h)
6712 return h->type;
6713 return NULL_TREE;
6714 }
6715
6716 /* Add an entry to the type-hash-table
6717 for a type TYPE whose hash code is HASHCODE. */
6718
6719 static void
6720 type_hash_add (hashval_t hashcode, tree type)
6721 {
6722 struct type_hash *h;
6723 void **loc;
6724
6725 h = ggc_alloc_type_hash ();
6726 h->hash = hashcode;
6727 h->type = type;
6728 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6729 *loc = (void *)h;
6730 }
6731
6732 /* Given TYPE, and HASHCODE its hash code, return the canonical
6733 object for an identical type if one already exists.
6734 Otherwise, return TYPE, and record it as the canonical object.
6735
6736 To use this function, first create a type of the sort you want.
6737 Then compute its hash code from the fields of the type that
6738 make it different from other similar types.
6739 Then call this function and use the value. */
6740
6741 tree
6742 type_hash_canon (unsigned int hashcode, tree type)
6743 {
6744 tree t1;
6745
6746 /* The hash table only contains main variants, so ensure that's what we're
6747 being passed. */
6748 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6749
6750 /* See if the type is in the hash table already. If so, return it.
6751 Otherwise, add the type. */
6752 t1 = type_hash_lookup (hashcode, type);
6753 if (t1 != 0)
6754 {
6755 if (GATHER_STATISTICS)
6756 {
6757 tree_code_counts[(int) TREE_CODE (type)]--;
6758 tree_node_counts[(int) t_kind]--;
6759 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6760 }
6761 return t1;
6762 }
6763 else
6764 {
6765 type_hash_add (hashcode, type);
6766 return type;
6767 }
6768 }
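
/* The usual idiom, as used for instance by build_range_type_1 later in
   this file, looks like

     hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
     hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
     hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
     itype = type_hash_canon (hashcode, itype);

   i.e. hash exactly the fields that distinguish the new type, then let
   type_hash_canon either return an existing identical type or register
   the freshly built one.  */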
6769
6770 /* See if the data pointed to by the type hash table is marked.  We consider
6771    it marked if the type it points to is marked, so that this entry goes
6772    away when that type is collected. */
6773
6774 static int
6775 type_hash_marked_p (const void *p)
6776 {
6777 const_tree const type = ((const struct type_hash *) p)->type;
6778
6779 return ggc_marked_p (type);
6780 }
6781
6782 static void
6783 print_type_hash_statistics (void)
6784 {
6785 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6786 (long) htab_size (type_hash_table),
6787 (long) htab_elements (type_hash_table),
6788 htab_collisions (type_hash_table));
6789 }
6790
6791 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6792 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6793 by adding the hash codes of the individual attributes. */
6794
6795 static unsigned int
6796 attribute_hash_list (const_tree list, hashval_t hashcode)
6797 {
6798 const_tree tail;
6799
6800 for (tail = list; tail; tail = TREE_CHAIN (tail))
6801 /* ??? Do we want to add in TREE_VALUE too? */
6802 hashcode = iterative_hash_object
6803 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6804 return hashcode;
6805 }
6806
6807 /* Given two lists of attributes, return true if list l2 is
6808 equivalent to l1. */
6809
6810 int
6811 attribute_list_equal (const_tree l1, const_tree l2)
6812 {
6813 if (l1 == l2)
6814 return 1;
6815
6816 return attribute_list_contained (l1, l2)
6817 && attribute_list_contained (l2, l1);
6818 }
6819
6820 /* Given two lists of attributes, return true if list L2 is
6821 completely contained within L1. */
6822 /* ??? This would be faster if attribute names were stored in a canonicalized
6823 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6824 must be used to show these elements are equivalent (which they are). */
6825 /* ??? It's not clear that attributes with arguments will always be handled
6826 correctly. */
6827
6828 int
6829 attribute_list_contained (const_tree l1, const_tree l2)
6830 {
6831 const_tree t1, t2;
6832
6833 /* First check the obvious, maybe the lists are identical. */
6834 if (l1 == l2)
6835 return 1;
6836
6837 /* Maybe the lists are similar. */
6838 for (t1 = l1, t2 = l2;
6839 t1 != 0 && t2 != 0
6840 && get_attribute_name (t1) == get_attribute_name (t2)
6841 && TREE_VALUE (t1) == TREE_VALUE (t2);
6842 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6843 ;
6844
6845 /* Maybe the lists are equal. */
6846 if (t1 == 0 && t2 == 0)
6847 return 1;
6848
6849 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6850 {
6851 const_tree attr;
6852 /* This CONST_CAST is okay because lookup_attribute does not
6853 modify its argument and the return value is assigned to a
6854 const_tree. */
6855 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6856 CONST_CAST_TREE (l1));
6857 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6858 attr = lookup_ident_attribute (get_attribute_name (t2),
6859 TREE_CHAIN (attr)))
6860 ;
6861
6862 if (attr == NULL_TREE)
6863 return 0;
6864 }
6865
6866 return 1;
6867 }
6868
6869 /* Given two lists of types
6870 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6871 return 1 if the lists contain the same types in the same order.
6872 Also, the TREE_PURPOSEs must match. */
6873
6874 int
6875 type_list_equal (const_tree l1, const_tree l2)
6876 {
6877 const_tree t1, t2;
6878
6879 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6880 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6881 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6882 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6883 && (TREE_TYPE (TREE_PURPOSE (t1))
6884 == TREE_TYPE (TREE_PURPOSE (t2))))))
6885 return 0;
6886
6887 return t1 == t2;
6888 }
6889
6890 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6891 given by TYPE. If the argument list accepts variable arguments,
6892 then this function counts only the ordinary arguments. */
6893
6894 int
6895 type_num_arguments (const_tree type)
6896 {
6897 int i = 0;
6898 tree t;
6899
6900 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6901 /* If the function does not take a variable number of arguments,
6902 the last element in the list will have type `void'. */
6903 if (VOID_TYPE_P (TREE_VALUE (t)))
6904 break;
6905 else
6906 ++i;
6907
6908 return i;
6909 }
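
/* For example, for the prototype "int f (int, double)" built as

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node,
                                             double_type_node, NULL_TREE);

   type_num_arguments (fntype) returns 2; the trailing "void" sentinel in
   TYPE_ARG_TYPES is not counted.  */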
6910
6911 /* Nonzero if integer constants T1 and T2
6912 represent the same constant value. */
6913
6914 int
6915 tree_int_cst_equal (const_tree t1, const_tree t2)
6916 {
6917 if (t1 == t2)
6918 return 1;
6919
6920 if (t1 == 0 || t2 == 0)
6921 return 0;
6922
6923 if (TREE_CODE (t1) == INTEGER_CST
6924 && TREE_CODE (t2) == INTEGER_CST
6925 && TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
6926 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2))
6927 return 1;
6928
6929 return 0;
6930 }
6931
6932 /* Nonzero if integer constants T1 and T2 represent values that satisfy <.
6933 The precise way of comparison depends on their data type. */
6934
6935 int
6936 tree_int_cst_lt (const_tree t1, const_tree t2)
6937 {
6938 if (t1 == t2)
6939 return 0;
6940
6941 if (TYPE_UNSIGNED (TREE_TYPE (t1)) != TYPE_UNSIGNED (TREE_TYPE (t2)))
6942 {
6943 int t1_sgn = tree_int_cst_sgn (t1);
6944 int t2_sgn = tree_int_cst_sgn (t2);
6945
6946 if (t1_sgn < t2_sgn)
6947 return 1;
6948 else if (t1_sgn > t2_sgn)
6949 return 0;
6950 /* Otherwise, both are non-negative, so we compare them as
6951 unsigned just in case one of them would overflow a signed
6952 type. */
6953 }
6954 else if (!TYPE_UNSIGNED (TREE_TYPE (t1)))
6955 return INT_CST_LT (t1, t2);
6956
6957 return INT_CST_LT_UNSIGNED (t1, t2);
6958 }
6959
6960 /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
6961
6962 int
6963 tree_int_cst_compare (const_tree t1, const_tree t2)
6964 {
6965 if (tree_int_cst_lt (t1, t2))
6966 return -1;
6967 else if (tree_int_cst_lt (t2, t1))
6968 return 1;
6969 else
6970 return 0;
6971 }
6972
6973 /* Return 1 if T is an INTEGER_CST that can be manipulated efficiently on
6974 the host. If POS is zero, the value can be represented in a single
6975 HOST_WIDE_INT. If POS is nonzero, the value must be non-negative and can
6976 be represented in a single unsigned HOST_WIDE_INT. */
6977
6978 int
6979 host_integerp (const_tree t, int pos)
6980 {
6981 if (t == NULL_TREE)
6982 return 0;
6983
6984 return (TREE_CODE (t) == INTEGER_CST
6985 && ((TREE_INT_CST_HIGH (t) == 0
6986 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
6987 || (! pos && TREE_INT_CST_HIGH (t) == -1
6988 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
6989 && !TYPE_UNSIGNED (TREE_TYPE (t)))
6990 || (pos && TREE_INT_CST_HIGH (t) == 0)));
6991 }
6992
6993 /* Return true if T is an INTEGER_CST whose numerical value (extended
6994 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6995
6996 bool
6997 tree_fits_shwi_p (const_tree t)
6998 {
6999 return (t != NULL_TREE
7000 && TREE_CODE (t) == INTEGER_CST
7001 && ((TREE_INT_CST_HIGH (t) == 0
7002 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
7003 || (TREE_INT_CST_HIGH (t) == -1
7004 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
7005 && !TYPE_UNSIGNED (TREE_TYPE (t)))));
7006 }
7007
7008 /* Return true if T is an INTEGER_CST whose numerical value (extended
7009 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7010
7011 bool
7012 tree_fits_uhwi_p (const_tree t)
7013 {
7014 return (t != NULL_TREE
7015 && TREE_CODE (t) == INTEGER_CST
7016 && TREE_INT_CST_HIGH (t) == 0);
7017 }
7018
7019 /* Return the HOST_WIDE_INT least significant bits of T if it is an
7020 INTEGER_CST and there is no overflow. POS is nonzero if the result must
7021 be non-negative. We must be able to satisfy the above conditions. */
7022
7023 HOST_WIDE_INT
7024 tree_low_cst (const_tree t, int pos)
7025 {
7026 gcc_assert (host_integerp (t, pos));
7027 return TREE_INT_CST_LOW (t);
7028 }
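
/* These two are normally used together; a typical guarded access looks
   like

     if (host_integerp (TYPE_SIZE_UNIT (type), 1))
       size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

   with the same POS argument in both calls, so the assertion in
   tree_low_cst cannot trigger.  */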
7029
7030 /* Return the most significant (sign) bit of T. */
7031
7032 int
7033 tree_int_cst_sign_bit (const_tree t)
7034 {
7035 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7036 unsigned HOST_WIDE_INT w;
7037
7038 if (bitno < HOST_BITS_PER_WIDE_INT)
7039 w = TREE_INT_CST_LOW (t);
7040 else
7041 {
7042 w = TREE_INT_CST_HIGH (t);
7043 bitno -= HOST_BITS_PER_WIDE_INT;
7044 }
7045
7046 return (w >> bitno) & 1;
7047 }
7048
7049 /* Return an indication of the sign of the integer constant T.
7050 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7051 Note that -1 will never be returned if T's type is unsigned. */
7052
7053 int
7054 tree_int_cst_sgn (const_tree t)
7055 {
7056 if (TREE_INT_CST_LOW (t) == 0 && TREE_INT_CST_HIGH (t) == 0)
7057 return 0;
7058 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7059 return 1;
7060 else if (TREE_INT_CST_HIGH (t) < 0)
7061 return -1;
7062 else
7063 return 1;
7064 }
7065
7066 /* Return the minimum number of bits needed to represent VALUE in a
7067 signed or unsigned type, UNSIGNEDP says which. */
7068
7069 unsigned int
7070 tree_int_cst_min_precision (tree value, bool unsignedp)
7071 {
7072 /* If the value is negative, compute its negative minus 1. The latter
7073 adjustment is because the absolute value of the largest negative value
7074 is one larger than the largest positive value. This is equivalent to
7075 a bit-wise negation, so use that operation instead. */
7076
7077 if (tree_int_cst_sgn (value) < 0)
7078 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7079
7080 /* Return the number of bits needed, taking into account the fact
7081 that we need one more bit for a signed than unsigned type.
7082 If value is 0 or -1, the minimum precision is 1 no matter
7083 whether unsignedp is true or false. */
7084
7085 if (integer_zerop (value))
7086 return 1;
7087 else
7088 return tree_floor_log2 (value) + 1 + !unsignedp;
7089 }
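
/* A couple of worked examples: for the constant 5 (binary 101),
   tree_floor_log2 gives 2, so the minimum precision is 3 bits for an
   unsigned type and 4 bits for a signed one (the extra bit carries the
   sign).  For -3, the value is first bit-negated to 2, yielding a minimum
   signed precision of 3 bits, which indeed covers the range -4..3.  */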
7090
7091 /* Return truthvalue of whether T1 is the same tree structure as T2.
7092 Return 1 if they are the same.
7093 Return 0 if they are understandably different.
7094 Return -1 if either contains tree structure not understood by
7095 this function. */
7096
7097 int
7098 simple_cst_equal (const_tree t1, const_tree t2)
7099 {
7100 enum tree_code code1, code2;
7101 int cmp;
7102 int i;
7103
7104 if (t1 == t2)
7105 return 1;
7106 if (t1 == 0 || t2 == 0)
7107 return 0;
7108
7109 code1 = TREE_CODE (t1);
7110 code2 = TREE_CODE (t2);
7111
7112 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7113 {
7114 if (CONVERT_EXPR_CODE_P (code2)
7115 || code2 == NON_LVALUE_EXPR)
7116 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7117 else
7118 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7119 }
7120
7121 else if (CONVERT_EXPR_CODE_P (code2)
7122 || code2 == NON_LVALUE_EXPR)
7123 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7124
7125 if (code1 != code2)
7126 return 0;
7127
7128 switch (code1)
7129 {
7130 case INTEGER_CST:
7131 return (TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
7132 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2));
7133
7134 case REAL_CST:
7135 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7136
7137 case FIXED_CST:
7138 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7139
7140 case STRING_CST:
7141 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7142 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7143 TREE_STRING_LENGTH (t1)));
7144
7145 case CONSTRUCTOR:
7146 {
7147 unsigned HOST_WIDE_INT idx;
7148 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7149 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7150
7151 if (vec_safe_length (v1) != vec_safe_length (v2))
7152 return false;
7153
7154 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7155 /* ??? Should we handle also fields here? */
7156 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7157 return false;
7158 return true;
7159 }
7160
7161 case SAVE_EXPR:
7162 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7163
7164 case CALL_EXPR:
7165 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7166 if (cmp <= 0)
7167 return cmp;
7168 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7169 return 0;
7170 {
7171 const_tree arg1, arg2;
7172 const_call_expr_arg_iterator iter1, iter2;
7173 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7174 arg2 = first_const_call_expr_arg (t2, &iter2);
7175 arg1 && arg2;
7176 arg1 = next_const_call_expr_arg (&iter1),
7177 arg2 = next_const_call_expr_arg (&iter2))
7178 {
7179 cmp = simple_cst_equal (arg1, arg2);
7180 if (cmp <= 0)
7181 return cmp;
7182 }
7183 return arg1 == arg2;
7184 }
7185
7186 case TARGET_EXPR:
7187 /* Special case: if either target is an unallocated VAR_DECL,
7188 it means that it's going to be unified with whatever the
7189 TARGET_EXPR is really supposed to initialize, so treat it
7190 as being equivalent to anything. */
7191 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7192 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7193 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7194 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7195 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7196 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7197 cmp = 1;
7198 else
7199 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7200
7201 if (cmp <= 0)
7202 return cmp;
7203
7204 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7205
7206 case WITH_CLEANUP_EXPR:
7207 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7208 if (cmp <= 0)
7209 return cmp;
7210
7211 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7212
7213 case COMPONENT_REF:
7214 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7215 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7216
7217 return 0;
7218
7219 case VAR_DECL:
7220 case PARM_DECL:
7221 case CONST_DECL:
7222 case FUNCTION_DECL:
7223 return 0;
7224
7225 default:
7226 break;
7227 }
7228
7229 /* This general rule works for most tree codes. All exceptions should be
7230 handled above. If this is a language-specific tree code, we can't
7231 trust what might be in the operand, so say we don't know
7232 the situation. */
7233 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7234 return -1;
7235
7236 switch (TREE_CODE_CLASS (code1))
7237 {
7238 case tcc_unary:
7239 case tcc_binary:
7240 case tcc_comparison:
7241 case tcc_expression:
7242 case tcc_reference:
7243 case tcc_statement:
7244 cmp = 1;
7245 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7246 {
7247 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7248 if (cmp <= 0)
7249 return cmp;
7250 }
7251
7252 return cmp;
7253
7254 default:
7255 return -1;
7256 }
7257 }
7258
7259 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7260 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7261 than U, respectively. */
7262
7263 int
7264 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7265 {
7266 if (tree_int_cst_sgn (t) < 0)
7267 return -1;
7268 else if (TREE_INT_CST_HIGH (t) != 0)
7269 return 1;
7270 else if (TREE_INT_CST_LOW (t) == u)
7271 return 0;
7272 else if (TREE_INT_CST_LOW (t) < u)
7273 return -1;
7274 else
7275 return 1;
7276 }
7277
7278 /* Return true if SIZE represents a constant size that is in bounds of
7279 what the middle-end and the backend accept (covering not more than
7280 half of the address-space). */
7281
7282 bool
7283 valid_constant_size_p (const_tree size)
7284 {
7285 if (! tree_fits_uhwi_p (size)
7286 || TREE_OVERFLOW (size)
7287 || tree_int_cst_sign_bit (size) != 0)
7288 return false;
7289 return true;
7290 }
7291
7292 /* Return the precision of the type, or for a complex or vector type the
7293 precision of the type of its elements. */
7294
7295 unsigned int
7296 element_precision (const_tree type)
7297 {
7298 enum tree_code code = TREE_CODE (type);
7299 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7300 type = TREE_TYPE (type);
7301
7302 return TYPE_PRECISION (type);
7303 }
7304
7305 /* Return true if CODE represents an associative tree code. Otherwise
7306 return false. */
7307 bool
7308 associative_tree_code (enum tree_code code)
7309 {
7310 switch (code)
7311 {
7312 case BIT_IOR_EXPR:
7313 case BIT_AND_EXPR:
7314 case BIT_XOR_EXPR:
7315 case PLUS_EXPR:
7316 case MULT_EXPR:
7317 case MIN_EXPR:
7318 case MAX_EXPR:
7319 return true;
7320
7321 default:
7322 break;
7323 }
7324 return false;
7325 }
7326
7327 /* Return true if CODE represents a commutative tree code. Otherwise
7328 return false. */
7329 bool
7330 commutative_tree_code (enum tree_code code)
7331 {
7332 switch (code)
7333 {
7334 case PLUS_EXPR:
7335 case MULT_EXPR:
7336 case MULT_HIGHPART_EXPR:
7337 case MIN_EXPR:
7338 case MAX_EXPR:
7339 case BIT_IOR_EXPR:
7340 case BIT_XOR_EXPR:
7341 case BIT_AND_EXPR:
7342 case NE_EXPR:
7343 case EQ_EXPR:
7344 case UNORDERED_EXPR:
7345 case ORDERED_EXPR:
7346 case UNEQ_EXPR:
7347 case LTGT_EXPR:
7348 case TRUTH_AND_EXPR:
7349 case TRUTH_XOR_EXPR:
7350 case TRUTH_OR_EXPR:
7351 case WIDEN_MULT_EXPR:
7352 case VEC_WIDEN_MULT_HI_EXPR:
7353 case VEC_WIDEN_MULT_LO_EXPR:
7354 case VEC_WIDEN_MULT_EVEN_EXPR:
7355 case VEC_WIDEN_MULT_ODD_EXPR:
7356 return true;
7357
7358 default:
7359 break;
7360 }
7361 return false;
7362 }
7363
7364 /* Return true if CODE represents a ternary tree code for which the
7365 first two operands are commutative. Otherwise return false. */
7366 bool
7367 commutative_ternary_tree_code (enum tree_code code)
7368 {
7369 switch (code)
7370 {
7371 case WIDEN_MULT_PLUS_EXPR:
7372 case WIDEN_MULT_MINUS_EXPR:
7373 return true;
7374
7375 default:
7376 break;
7377 }
7378 return false;
7379 }
7380
7381 /* Generate a hash value for an expression. This can be used iteratively
7382 by passing a previous result as the VAL argument.
7383
7384 This function is intended to produce the same hash for expressions which
7385 would compare equal using operand_equal_p. */
7386
7387 hashval_t
7388 iterative_hash_expr (const_tree t, hashval_t val)
7389 {
7390 int i;
7391 enum tree_code code;
7392 char tclass;
7393
7394 if (t == NULL_TREE)
7395 return iterative_hash_hashval_t (0, val);
7396
7397 code = TREE_CODE (t);
7398
7399 switch (code)
7400 {
7401 /* Alas, constants aren't shared, so we can't rely on pointer
7402 identity. */
7403 case INTEGER_CST:
7404 val = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), val);
7405 return iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), val);
7406 case REAL_CST:
7407 {
7408 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7409
7410 return iterative_hash_hashval_t (val2, val);
7411 }
7412 case FIXED_CST:
7413 {
7414 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7415
7416 return iterative_hash_hashval_t (val2, val);
7417 }
7418 case STRING_CST:
7419 return iterative_hash (TREE_STRING_POINTER (t),
7420 TREE_STRING_LENGTH (t), val);
7421 case COMPLEX_CST:
7422 val = iterative_hash_expr (TREE_REALPART (t), val);
7423 return iterative_hash_expr (TREE_IMAGPART (t), val);
7424 case VECTOR_CST:
7425 {
7426 unsigned i;
7427 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7428 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7429 return val;
7430 }
7431 case SSA_NAME:
7432 /* We can just compare by pointer. */
7433 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7434 case PLACEHOLDER_EXPR:
7435 /* The node itself doesn't matter. */
7436 return val;
7437 case TREE_LIST:
7438 /* A list of expressions, for a CALL_EXPR or as the elements of a
7439 VECTOR_CST. */
7440 for (; t; t = TREE_CHAIN (t))
7441 val = iterative_hash_expr (TREE_VALUE (t), val);
7442 return val;
7443 case CONSTRUCTOR:
7444 {
7445 unsigned HOST_WIDE_INT idx;
7446 tree field, value;
7447 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7448 {
7449 val = iterative_hash_expr (field, val);
7450 val = iterative_hash_expr (value, val);
7451 }
7452 return val;
7453 }
7454 case FUNCTION_DECL:
7455 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7456 Otherwise nodes that compare equal according to operand_equal_p might
7457 get different hash codes. However, don't do this for machine specific
7458 or front end builtins, since the function code is overloaded in those
7459 cases. */
7460 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7461 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7462 {
7463 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7464 code = TREE_CODE (t);
7465 }
7466 /* FALL THROUGH */
7467 default:
7468 tclass = TREE_CODE_CLASS (code);
7469
7470 if (tclass == tcc_declaration)
7471 {
7472 /* DECLs have a unique ID. */
7473 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7474 }
7475 else
7476 {
7477 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7478
7479 val = iterative_hash_object (code, val);
7480
7481 /* Don't hash the type, that can lead to having nodes which
7482 compare equal according to operand_equal_p, but which
7483 have different hash codes. */
7484 if (CONVERT_EXPR_CODE_P (code)
7485 || code == NON_LVALUE_EXPR)
7486 {
7487 /* Make sure to include signedness in the hash computation. */
7488 val += TYPE_UNSIGNED (TREE_TYPE (t));
7489 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7490 }
7491
7492 else if (commutative_tree_code (code))
7493 {
7494 /* It's a commutative expression. We want to hash it the same
7495 however it appears. We do this by first hashing both operands
7496 and then rehashing based on the order of their independent
7497 hashes. */
7498 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7499 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7500 hashval_t tem;
7501
7502 if (one > two)
7503 tem = one, one = two, two = tem;
7504
7505 val = iterative_hash_hashval_t (one, val);
7506 val = iterative_hash_hashval_t (two, val);
7507 }
7508 else
7509 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7510 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7511 }
7512 return val;
7513 }
7514 }
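
/* Typical use is to fold several expressions into one running hash, e.g.

     hashval_t val = 0;
     val = iterative_hash_expr (op0, val);
     val = iterative_hash_expr (op1, val);

   (OP0 and OP1 standing for the expressions being combined); the same
   sequence of calls on operand_equal_p-equal operands yields the same
   final value.  */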
7515
7516 /* Constructors for pointer, array and function types.
7517 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7518 constructed by language-dependent code, not here.) */
7519
7520 /* Construct, lay out and return the type of pointers to TO_TYPE with
7521 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7522 reference all of memory. If such a type has already been
7523 constructed, reuse it. */
7524
7525 tree
7526 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7527 bool can_alias_all)
7528 {
7529 tree t;
7530
7531 if (to_type == error_mark_node)
7532 return error_mark_node;
7533
7534 /* If the pointed-to type has the may_alias attribute set, force
7535 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7536 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7537 can_alias_all = true;
7538
7539 /* In some cases, languages will have things that aren't a POINTER_TYPE
7540 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7541 In that case, return that type without regard to the rest of our
7542 operands.
7543
7544 ??? This is a kludge, but consistent with the way this function has
7545 always operated and there doesn't seem to be a good way to avoid this
7546 at the moment. */
7547 if (TYPE_POINTER_TO (to_type) != 0
7548 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7549 return TYPE_POINTER_TO (to_type);
7550
7551 /* First, if we already have a type for pointers to TO_TYPE and it's
7552 the proper mode, use it. */
7553 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7554 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7555 return t;
7556
7557 t = make_node (POINTER_TYPE);
7558
7559 TREE_TYPE (t) = to_type;
7560 SET_TYPE_MODE (t, mode);
7561 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7562 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7563 TYPE_POINTER_TO (to_type) = t;
7564
7565 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7566 SET_TYPE_STRUCTURAL_EQUALITY (t);
7567 else if (TYPE_CANONICAL (to_type) != to_type)
7568 TYPE_CANONICAL (t)
7569 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7570 mode, can_alias_all);
7571
7572 /* Lay out the type. This function has many callers that are concerned
7573 with expression-construction, and this simplifies them all. */
7574 layout_type (t);
7575
7576 return t;
7577 }
7578
7579 /* By default build pointers in ptr_mode. */
7580
7581 tree
7582 build_pointer_type (tree to_type)
7583 {
7584 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7585 : TYPE_ADDR_SPACE (to_type);
7586 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7587 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7588 }
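
/* So, for instance,

     tree pchar = build_pointer_type (char_type_node);

   yields the node for "char *" in the pointer mode of the pointed-to
   type's address space, reusing an existing node when one was already
   built.  */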
7589
7590 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7591
7592 tree
7593 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7594 bool can_alias_all)
7595 {
7596 tree t;
7597
7598 if (to_type == error_mark_node)
7599 return error_mark_node;
7600
7601 /* If the pointed-to type has the may_alias attribute set, force
7602 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7603 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7604 can_alias_all = true;
7605
7606 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7607 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7608 In that case, return that type without regard to the rest of our
7609 operands.
7610
7611 ??? This is a kludge, but consistent with the way this function has
7612 always operated and there doesn't seem to be a good way to avoid this
7613 at the moment. */
7614 if (TYPE_REFERENCE_TO (to_type) != 0
7615 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7616 return TYPE_REFERENCE_TO (to_type);
7617
7618 /* First, if we already have a type for pointers to TO_TYPE and it's
7619 the proper mode, use it. */
7620 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7621 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7622 return t;
7623
7624 t = make_node (REFERENCE_TYPE);
7625
7626 TREE_TYPE (t) = to_type;
7627 SET_TYPE_MODE (t, mode);
7628 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7629 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7630 TYPE_REFERENCE_TO (to_type) = t;
7631
7632 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7633 SET_TYPE_STRUCTURAL_EQUALITY (t);
7634 else if (TYPE_CANONICAL (to_type) != to_type)
7635 TYPE_CANONICAL (t)
7636 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7637 mode, can_alias_all);
7638
7639 layout_type (t);
7640
7641 return t;
7642 }
7643
7644
7645 /* Build the node for the type of references-to-TO_TYPE by default
7646 in ptr_mode. */
7647
7648 tree
7649 build_reference_type (tree to_type)
7650 {
7651 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7652 : TYPE_ADDR_SPACE (to_type);
7653 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7654 return build_reference_type_for_mode (to_type, pointer_mode, false);
7655 }
7656
7657 #define MAX_INT_CACHED_PREC \
7658 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7659 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7660
7661 /* Builds a signed or unsigned integer type of precision PRECISION.
7662 Used for C bitfields whose precision does not match that of
7663 built-in target types. */
7664 tree
7665 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7666 int unsignedp)
7667 {
7668 tree itype, ret;
7669
7670 if (unsignedp)
7671 unsignedp = MAX_INT_CACHED_PREC + 1;
7672
7673 if (precision <= MAX_INT_CACHED_PREC)
7674 {
7675 itype = nonstandard_integer_type_cache[precision + unsignedp];
7676 if (itype)
7677 return itype;
7678 }
7679
7680 itype = make_node (INTEGER_TYPE);
7681 TYPE_PRECISION (itype) = precision;
7682
7683 if (unsignedp)
7684 fixup_unsigned_type (itype);
7685 else
7686 fixup_signed_type (itype);
7687
7688 ret = itype;
7689 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7690 ret = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
7691 if (precision <= MAX_INT_CACHED_PREC)
7692 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7693
7694 return ret;
7695 }
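
/* For example, a 24-bit unsigned bit-field type can be obtained with

     tree t = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are cached, so a second request
   with the same precision and signedness returns the same node.  */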
7696
7697 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7698 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7699 is true, reuse such a type that has already been constructed. */
7700
7701 static tree
7702 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7703 {
7704 tree itype = make_node (INTEGER_TYPE);
7705 hashval_t hashcode = 0;
7706
7707 TREE_TYPE (itype) = type;
7708
7709 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7710 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7711
7712 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7713 SET_TYPE_MODE (itype, TYPE_MODE (type));
7714 TYPE_SIZE (itype) = TYPE_SIZE (type);
7715 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7716 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7717 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7718
7719 if (!shared)
7720 return itype;
7721
7722 if ((TYPE_MIN_VALUE (itype)
7723 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7724 || (TYPE_MAX_VALUE (itype)
7725 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7726 {
7727 /* Since we cannot reliably merge this type, we need to compare it using
7728 structural equality checks. */
7729 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7730 return itype;
7731 }
7732
7733 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7734 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7735 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7736 itype = type_hash_canon (hashcode, itype);
7737
7738 return itype;
7739 }
7740
7741 /* Wrapper around build_range_type_1 with SHARED set to true. */
7742
7743 tree
7744 build_range_type (tree type, tree lowval, tree highval)
7745 {
7746 return build_range_type_1 (type, lowval, highval, true);
7747 }
7748
7749 /* Wrapper around build_range_type_1 with SHARED set to false. */
7750
7751 tree
7752 build_nonshared_range_type (tree type, tree lowval, tree highval)
7753 {
7754 return build_range_type_1 (type, lowval, highval, false);
7755 }
7756
7757 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7758 MAXVAL should be the maximum value in the domain
7759 (one less than the length of the array).
7760
7761 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7762 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7763 The limit exists because the result is a signed type and we don't handle
7764 sizes that use more than one HOST_WIDE_INT. */
7765
7766 tree
7767 build_index_type (tree maxval)
7768 {
7769 return build_range_type (sizetype, size_zero_node, maxval);
7770 }
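
/* Illustrative usage (added sketch, not part of the original file): the
   TYPE_DOMAIN for a ten-element array, i.e. the index range [0, 9], can be
   created with

       tree domain = build_index_type (size_int (9));

   which is shorthand for
   build_range_type (sizetype, size_zero_node, size_int (9)).  */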
7771
7772 /* Return true if the debug information for TYPE, a subtype, should be emitted
7773 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7774 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7775 debug info and doesn't reflect the source code. */
7776
7777 bool
7778 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7779 {
7780 tree base_type = TREE_TYPE (type), low, high;
7781
7782 /* Subrange types have a base type which is an integral type. */
7783 if (!INTEGRAL_TYPE_P (base_type))
7784 return false;
7785
7786 /* Get the real bounds of the subtype. */
7787 if (lang_hooks.types.get_subrange_bounds)
7788 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7789 else
7790 {
7791 low = TYPE_MIN_VALUE (type);
7792 high = TYPE_MAX_VALUE (type);
7793 }
7794
7795 /* If the type and its base type have the same representation and the same
7796 name, then the type is not a subrange but a copy of the base type. */
7797 if ((TREE_CODE (base_type) == INTEGER_TYPE
7798 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7799 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7800 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7801 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
7802 {
7803 tree type_name = TYPE_NAME (type);
7804 tree base_type_name = TYPE_NAME (base_type);
7805
7806 if (type_name && TREE_CODE (type_name) == TYPE_DECL)
7807 type_name = DECL_NAME (type_name);
7808
7809 if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
7810 base_type_name = DECL_NAME (base_type_name);
7811
7812 if (type_name == base_type_name)
7813 return false;
7814 }
7815
7816 if (lowval)
7817 *lowval = low;
7818 if (highval)
7819 *highval = high;
7820 return true;
7821 }
7822
7823 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7824 and number of elements specified by the range of values of INDEX_TYPE.
7825 If SHARED is true, reuse such a type that has already been constructed. */
7826
7827 static tree
7828 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7829 {
7830 tree t;
7831
7832 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7833 {
7834 error ("arrays of functions are not meaningful");
7835 elt_type = integer_type_node;
7836 }
7837
7838 t = make_node (ARRAY_TYPE);
7839 TREE_TYPE (t) = elt_type;
7840 TYPE_DOMAIN (t) = index_type;
7841 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7842 layout_type (t);
7843
7844 /* If the element type is incomplete at this point we get marked for
7845 structural equality. Do not record these types in the canonical
7846 type hashtable. */
7847 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7848 return t;
7849
7850 if (shared)
7851 {
7852 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7853 if (index_type)
7854 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7855 t = type_hash_canon (hashcode, t);
7856 }
7857
7858 if (TYPE_CANONICAL (t) == t)
7859 {
7860 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7861 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7862 SET_TYPE_STRUCTURAL_EQUALITY (t);
7863 else if (TYPE_CANONICAL (elt_type) != elt_type
7864 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7865 TYPE_CANONICAL (t)
7866 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7867 index_type
7868 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7869 shared);
7870 }
7871
7872 return t;
7873 }
7874
7875 /* Wrapper around build_array_type_1 with SHARED set to true. */
7876
7877 tree
7878 build_array_type (tree elt_type, tree index_type)
7879 {
7880 return build_array_type_1 (elt_type, index_type, true);
7881 }
7882
7883 /* Wrapper around build_array_type_1 with SHARED set to false. */
7884
7885 tree
7886 build_nonshared_array_type (tree elt_type, tree index_type)
7887 {
7888 return build_array_type_1 (elt_type, index_type, false);
7889 }
7890
7891 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7892 sizetype. */
7893
7894 tree
7895 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7896 {
7897 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7898 }
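
/* Illustrative usage (added sketch, not part of the original file): the type
   "int[10]" can be obtained with

       tree a10 = build_array_type_nelts (integer_type_node, 10);

   which is equivalent to
   build_array_type (integer_type_node, build_index_type (size_int (9))).  */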
7899
7900 /* Recursively examines the array elements of TYPE, until a non-array
7901 element type is found. */
7902
7903 tree
7904 strip_array_types (tree type)
7905 {
7906 while (TREE_CODE (type) == ARRAY_TYPE)
7907 type = TREE_TYPE (type);
7908
7909 return type;
7910 }
7911
7912 /* Computes the canonical argument types from the argument type list
7913 ARGTYPES.
7914
7915 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7916 on entry to this function, or if any of the ARGTYPES are
7917 structural.
7918
7919 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7920 true on entry to this function, or if any of the ARGTYPES are
7921 non-canonical.
7922
7923 Returns a canonical argument list, which may be ARGTYPES when the
7924 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7925 true) or would not differ from ARGTYPES. */
7926
7927 static tree
7928 maybe_canonicalize_argtypes (tree argtypes,
7929 bool *any_structural_p,
7930 bool *any_noncanonical_p)
7931 {
7932 tree arg;
7933 bool any_noncanonical_argtypes_p = false;
7934
7935 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7936 {
7937 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7938 /* Fail gracefully by stating that the type is structural. */
7939 *any_structural_p = true;
7940 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7941 *any_structural_p = true;
7942 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7943 || TREE_PURPOSE (arg))
7944 /* If the argument has a default argument, we consider it
7945 non-canonical even though the type itself is canonical.
7946 That way, different variants of function and method types
7947 with default arguments will all point to the variant with
7948 no defaults as their canonical type. */
7949 any_noncanonical_argtypes_p = true;
7950 }
7951
7952 if (*any_structural_p)
7953 return argtypes;
7954
7955 if (any_noncanonical_argtypes_p)
7956 {
7957 /* Build the canonical list of argument types. */
7958 tree canon_argtypes = NULL_TREE;
7959 bool is_void = false;
7960
7961 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7962 {
7963 if (arg == void_list_node)
7964 is_void = true;
7965 else
7966 canon_argtypes = tree_cons (NULL_TREE,
7967 TYPE_CANONICAL (TREE_VALUE (arg)),
7968 canon_argtypes);
7969 }
7970
7971 canon_argtypes = nreverse (canon_argtypes);
7972 if (is_void)
7973 canon_argtypes = chainon (canon_argtypes, void_list_node);
7974
7975 /* There is a non-canonical type. */
7976 *any_noncanonical_p = true;
7977 return canon_argtypes;
7978 }
7979
7980 /* The canonical argument types are the same as ARGTYPES. */
7981 return argtypes;
7982 }
7983
7984 /* Construct, lay out and return
7985 the type of functions returning type VALUE_TYPE
7986 given arguments of types ARG_TYPES.
7987 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7988 are data type nodes for the arguments of the function.
7989 If such a type has already been constructed, reuse it. */
7990
7991 tree
7992 build_function_type (tree value_type, tree arg_types)
7993 {
7994 tree t;
7995 hashval_t hashcode = 0;
7996 bool any_structural_p, any_noncanonical_p;
7997 tree canon_argtypes;
7998
7999 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8000 {
8001 error ("function return type cannot be function");
8002 value_type = integer_type_node;
8003 }
8004
8005 /* Make a node of the sort we want. */
8006 t = make_node (FUNCTION_TYPE);
8007 TREE_TYPE (t) = value_type;
8008 TYPE_ARG_TYPES (t) = arg_types;
8009
8010 /* If we already have such a type, use the old one. */
8011 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
8012 hashcode = type_hash_list (arg_types, hashcode);
8013 t = type_hash_canon (hashcode, t);
8014
8015 /* Set up the canonical type. */
8016 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8017 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8018 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8019 &any_structural_p,
8020 &any_noncanonical_p);
8021 if (any_structural_p)
8022 SET_TYPE_STRUCTURAL_EQUALITY (t);
8023 else if (any_noncanonical_p)
8024 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8025 canon_argtypes);
8026
8027 if (!COMPLETE_TYPE_P (t))
8028 layout_type (t);
8029 return t;
8030 }
8031
8032 /* Build a function type. The RETURN_TYPE is the type returned by the
8033 function. If VAARGS is set, no void_type_node is appended to
8034 the list. ARGP must always be terminated by a NULL_TREE. */
8035
8036 static tree
8037 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8038 {
8039 tree t, args, last;
8040
8041 t = va_arg (argp, tree);
8042 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8043 args = tree_cons (NULL_TREE, t, args);
8044
8045 if (vaargs)
8046 {
8047 last = args;
8048 if (args != NULL_TREE)
8049 args = nreverse (args);
8050 gcc_assert (last != void_list_node);
8051 }
8052 else if (args == NULL_TREE)
8053 args = void_list_node;
8054 else
8055 {
8056 last = args;
8057 args = nreverse (args);
8058 TREE_CHAIN (last) = void_list_node;
8059 }
8060 args = build_function_type (return_type, args);
8061
8062 return args;
8063 }
8064
8065 /* Build a function type. The RETURN_TYPE is the type returned by the
8066 function. If additional arguments are provided, they are
8067 additional argument types. The list of argument types must always
8068 be terminated by NULL_TREE. */
8069
8070 tree
8071 build_function_type_list (tree return_type, ...)
8072 {
8073 tree args;
8074 va_list p;
8075
8076 va_start (p, return_type);
8077 args = build_function_type_list_1 (false, return_type, p);
8078 va_end (p);
8079 return args;
8080 }
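
/* Illustrative usage (added sketch, not part of the original file): the type
   of a function "int f (int, double)" can be built with

       tree fntype = build_function_type_list (integer_type_node,
                                               integer_type_node,
                                               double_type_node,
                                               NULL_TREE);

   The terminating NULL_TREE is mandatory; void_list_node is appended
   internally to mark the argument list as non-variadic.  */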
8081
8082 /* Build a variable argument function type. The RETURN_TYPE is the
8083 type returned by the function. If additional arguments are provided,
8084 they are additional argument types. The list of argument types must
8085 always be terminated by NULL_TREE. */
8086
8087 tree
8088 build_varargs_function_type_list (tree return_type, ...)
8089 {
8090 tree args;
8091 va_list p;
8092
8093 va_start (p, return_type);
8094 args = build_function_type_list_1 (true, return_type, p);
8095 va_end (p);
8096
8097 return args;
8098 }
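
/* Illustrative usage (added sketch, not part of the original file): a
   printf-like type "int f (char *, ...)" can be built with

       tree fntype
         = build_varargs_function_type_list (integer_type_node,
                                             build_pointer_type (char_type_node),
                                             NULL_TREE);

   No void_list_node is appended, so the resulting TYPE_ARG_TYPES chain is not
   terminated by void_list_node, which is how a variadic type is
   represented.  */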
8099
8100 /* Build a function type. RETURN_TYPE is the type returned by the
8101 function; VAARGS indicates whether the function takes varargs. The
8102 function takes N named arguments, the types of which are provided in
8103 ARG_TYPES. */
8104
8105 static tree
8106 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8107 tree *arg_types)
8108 {
8109 int i;
8110 tree t = vaargs ? NULL_TREE : void_list_node;
8111
8112 for (i = n - 1; i >= 0; i--)
8113 t = tree_cons (NULL_TREE, arg_types[i], t);
8114
8115 return build_function_type (return_type, t);
8116 }
8117
8118 /* Build a function type. RETURN_TYPE is the type returned by the
8119 function. The function takes N named arguments, the types of which
8120 are provided in ARG_TYPES. */
8121
8122 tree
8123 build_function_type_array (tree return_type, int n, tree *arg_types)
8124 {
8125 return build_function_type_array_1 (false, return_type, n, arg_types);
8126 }
8127
8128 /* Build a variable argument function type. RETURN_TYPE is the type
8129 returned by the function. The function takes N named arguments, the
8130 types of which are provided in ARG_TYPES. */
8131
8132 tree
8133 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8134 {
8135 return build_function_type_array_1 (true, return_type, n, arg_types);
8136 }
8137
8138 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8139 and ARGTYPES (a TREE_LIST) are the return type and argument types
8140 for the method. An implicit additional parameter (of type
8141 pointer-to-BASETYPE) is added to the ARGTYPES. */
8142
8143 tree
8144 build_method_type_directly (tree basetype,
8145 tree rettype,
8146 tree argtypes)
8147 {
8148 tree t;
8149 tree ptype;
8150 int hashcode = 0;
8151 bool any_structural_p, any_noncanonical_p;
8152 tree canon_argtypes;
8153
8154 /* Make a node of the sort we want. */
8155 t = make_node (METHOD_TYPE);
8156
8157 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8158 TREE_TYPE (t) = rettype;
8159 ptype = build_pointer_type (basetype);
8160
8161 /* The actual arglist for this function includes a "hidden" argument
8162 which is "this". Put it into the list of argument types. */
8163 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8164 TYPE_ARG_TYPES (t) = argtypes;
8165
8166 /* If we already have such a type, use the old one. */
8167 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8168 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8169 hashcode = type_hash_list (argtypes, hashcode);
8170 t = type_hash_canon (hashcode, t);
8171
8172 /* Set up the canonical type. */
8173 any_structural_p
8174 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8175 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8176 any_noncanonical_p
8177 = (TYPE_CANONICAL (basetype) != basetype
8178 || TYPE_CANONICAL (rettype) != rettype);
8179 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8180 &any_structural_p,
8181 &any_noncanonical_p);
8182 if (any_structural_p)
8183 SET_TYPE_STRUCTURAL_EQUALITY (t);
8184 else if (any_noncanonical_p)
8185 TYPE_CANONICAL (t)
8186 = build_method_type_directly (TYPE_CANONICAL (basetype),
8187 TYPE_CANONICAL (rettype),
8188 canon_argtypes);
8189 if (!COMPLETE_TYPE_P (t))
8190 layout_type (t);
8191
8192 return t;
8193 }
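
/* Illustrative sketch (added for exposition, not part of the original file;
   class_type stands for some already-built RECORD_TYPE): the METHOD_TYPE of
   "int C::f (double)" could be constructed as

       tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
       tree mtype = build_method_type_directly (class_type,
                                                integer_type_node, args);

   The resulting TYPE_ARG_TYPES begins with pointer-to-class_type, the
   implicit "this" parameter added above.  */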
8194
8195 /* Construct, lay out and return the type of methods belonging to class
8196 BASETYPE and whose arguments and values are described by TYPE.
8197 If that type exists already, reuse it.
8198 TYPE must be a FUNCTION_TYPE node. */
8199
8200 tree
8201 build_method_type (tree basetype, tree type)
8202 {
8203 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8204
8205 return build_method_type_directly (basetype,
8206 TREE_TYPE (type),
8207 TYPE_ARG_TYPES (type));
8208 }
8209
8210 /* Construct, lay out and return the type of offsets to a value
8211 of type TYPE, within an object of type BASETYPE.
8212 If a suitable offset type exists already, reuse it. */
8213
8214 tree
8215 build_offset_type (tree basetype, tree type)
8216 {
8217 tree t;
8218 hashval_t hashcode = 0;
8219
8220 /* Make a node of the sort we want. */
8221 t = make_node (OFFSET_TYPE);
8222
8223 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8224 TREE_TYPE (t) = type;
8225
8226 /* If we already have such a type, use the old one. */
8227 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8228 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8229 t = type_hash_canon (hashcode, t);
8230
8231 if (!COMPLETE_TYPE_P (t))
8232 layout_type (t);
8233
8234 if (TYPE_CANONICAL (t) == t)
8235 {
8236 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8237 || TYPE_STRUCTURAL_EQUALITY_P (type))
8238 SET_TYPE_STRUCTURAL_EQUALITY (t);
8239 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8240 || TYPE_CANONICAL (type) != type)
8241 TYPE_CANONICAL (t)
8242 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8243 TYPE_CANONICAL (type));
8244 }
8245
8246 return t;
8247 }
8248
8249 /* Create a complex type whose components are COMPONENT_TYPE. */
8250
8251 tree
8252 build_complex_type (tree component_type)
8253 {
8254 tree t;
8255 hashval_t hashcode;
8256
8257 gcc_assert (INTEGRAL_TYPE_P (component_type)
8258 || SCALAR_FLOAT_TYPE_P (component_type)
8259 || FIXED_POINT_TYPE_P (component_type));
8260
8261 /* Make a node of the sort we want. */
8262 t = make_node (COMPLEX_TYPE);
8263
8264 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8265
8266 /* If we already have such a type, use the old one. */
8267 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8268 t = type_hash_canon (hashcode, t);
8269
8270 if (!COMPLETE_TYPE_P (t))
8271 layout_type (t);
8272
8273 if (TYPE_CANONICAL (t) == t)
8274 {
8275 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8276 SET_TYPE_STRUCTURAL_EQUALITY (t);
8277 else if (TYPE_CANONICAL (component_type) != component_type)
8278 TYPE_CANONICAL (t)
8279 = build_complex_type (TYPE_CANONICAL (component_type));
8280 }
8281
8282 /* We need to create a name, since complex is a fundamental type. */
8283 if (! TYPE_NAME (t))
8284 {
8285 const char *name;
8286 if (component_type == char_type_node)
8287 name = "complex char";
8288 else if (component_type == signed_char_type_node)
8289 name = "complex signed char";
8290 else if (component_type == unsigned_char_type_node)
8291 name = "complex unsigned char";
8292 else if (component_type == short_integer_type_node)
8293 name = "complex short int";
8294 else if (component_type == short_unsigned_type_node)
8295 name = "complex short unsigned int";
8296 else if (component_type == integer_type_node)
8297 name = "complex int";
8298 else if (component_type == unsigned_type_node)
8299 name = "complex unsigned int";
8300 else if (component_type == long_integer_type_node)
8301 name = "complex long int";
8302 else if (component_type == long_unsigned_type_node)
8303 name = "complex long unsigned int";
8304 else if (component_type == long_long_integer_type_node)
8305 name = "complex long long int";
8306 else if (component_type == long_long_unsigned_type_node)
8307 name = "complex long long unsigned int";
8308 else
8309 name = 0;
8310
8311 if (name != 0)
8312 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8313 get_identifier (name), t);
8314 }
8315
8316 return build_qualified_type (t, TYPE_QUALS (component_type));
8317 }
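
/* Illustrative usage (added sketch, not part of the original file):

       tree cdouble = build_complex_type (double_type_node);

   yields the type "_Complex double".  The explicit naming block above only
   applies to integer component types, so this function does not synthesize a
   TYPE_NAME for the complex double node.  */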
8318
8319 /* If TYPE is a real or complex floating-point type and the target
8320 does not directly support arithmetic on TYPE then return the wider
8321 type to be used for arithmetic on TYPE. Otherwise, return
8322 NULL_TREE. */
8323
8324 tree
8325 excess_precision_type (tree type)
8326 {
8327 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8328 {
8329 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8330 switch (TREE_CODE (type))
8331 {
8332 case REAL_TYPE:
8333 switch (flt_eval_method)
8334 {
8335 case 1:
8336 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8337 return double_type_node;
8338 break;
8339 case 2:
8340 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8341 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8342 return long_double_type_node;
8343 break;
8344 default:
8345 gcc_unreachable ();
8346 }
8347 break;
8348 case COMPLEX_TYPE:
8349 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8350 return NULL_TREE;
8351 switch (flt_eval_method)
8352 {
8353 case 1:
8354 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8355 return complex_double_type_node;
8356 break;
8357 case 2:
8358 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8359 || (TYPE_MODE (TREE_TYPE (type))
8360 == TYPE_MODE (double_type_node)))
8361 return complex_long_double_type_node;
8362 break;
8363 default:
8364 gcc_unreachable ();
8365 }
8366 break;
8367 default:
8368 break;
8369 }
8370 }
8371 return NULL_TREE;
8372 }
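
/* For reference (added note, not part of the original file): the
   flt_eval_method values above follow C99 FLT_EVAL_METHOD: 1 means "evaluate
   float in double precision", 2 means "evaluate float and double in long
   double precision".  For example, on 32-bit x86 using x87 arithmetic
   TARGET_FLT_EVAL_METHOD is 2, so with -fexcess-precision=standard a
   REAL_TYPE with float's mode is widened to long_double_type_node.  */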
8373 \f
8374 /* Return OP, stripped of any conversions to wider types as much as is safe.
8375 Converting the value back to OP's type makes a value equivalent to OP.
8376
8377 If FOR_TYPE is nonzero, we return a value which, if converted to
8378 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8379
8380 OP must have integer, real or enumeral type. Pointers are not allowed!
8381
8382 There are some cases where the obvious value we could return
8383 would regenerate to OP if converted to OP's type,
8384 but would not extend like OP to wider types.
8385 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8386 For example, if OP is (unsigned short)(signed char)-1,
8387 we avoid returning (signed char)-1 if FOR_TYPE is int,
8388 even though extending that to an unsigned short would regenerate OP,
8389 since the result of extending (signed char)-1 to (int)
8390 is different from (int) OP. */
8391
8392 tree
8393 get_unwidened (tree op, tree for_type)
8394 {
8395 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8396 tree type = TREE_TYPE (op);
8397 unsigned final_prec
8398 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8399 int uns
8400 = (for_type != 0 && for_type != type
8401 && final_prec > TYPE_PRECISION (type)
8402 && TYPE_UNSIGNED (type));
8403 tree win = op;
8404
8405 while (CONVERT_EXPR_P (op))
8406 {
8407 int bitschange;
8408
8409 /* TYPE_PRECISION on vector types has different meaning
8410 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8411 so avoid them here. */
8412 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8413 break;
8414
8415 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8416 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8417
8418 /* Truncations are many-to-one and so cannot be removed,
8419 unless we are later going to truncate down even further. */
8420 if (bitschange < 0
8421 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8422 break;
8423
8424 /* See what's inside this conversion. If we decide to strip it,
8425 we will set WIN. */
8426 op = TREE_OPERAND (op, 0);
8427
8428 /* If we have not stripped any zero-extensions (uns is 0),
8429 we can strip any kind of extension.
8430 If we have previously stripped a zero-extension,
8431 only zero-extensions can safely be stripped.
8432 Any extension can be stripped if the bits it would produce
8433 are all going to be discarded later by truncating to FOR_TYPE. */
8434
8435 if (bitschange > 0)
8436 {
8437 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8438 win = op;
8439 /* TYPE_UNSIGNED says whether this is a zero-extension.
8440 Let's avoid computing it if it does not affect WIN
8441 and if UNS will not be needed again. */
8442 if ((uns
8443 || CONVERT_EXPR_P (op))
8444 && TYPE_UNSIGNED (TREE_TYPE (op)))
8445 {
8446 uns = 1;
8447 win = op;
8448 }
8449 }
8450 }
8451
8452 /* If we finally reach a constant see if it fits in for_type and
8453 in that case convert it. */
8454 if (for_type
8455 && TREE_CODE (win) == INTEGER_CST
8456 && TREE_TYPE (win) != for_type
8457 && int_fits_type_p (win, for_type))
8458 win = fold_convert (for_type, win);
8459
8460 return win;
8461 }
8462 \f
8463 /* Return OP or a simpler expression for a narrower value
8464 which can be sign-extended or zero-extended to give back OP.
8465 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8466 or 0 if the value should be sign-extended. */
8467
8468 tree
8469 get_narrower (tree op, int *unsignedp_ptr)
8470 {
8471 int uns = 0;
8472 int first = 1;
8473 tree win = op;
8474 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8475
8476 while (TREE_CODE (op) == NOP_EXPR)
8477 {
8478 int bitschange
8479 = (TYPE_PRECISION (TREE_TYPE (op))
8480 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8481
8482 /* Truncations are many-to-one and so cannot be removed. */
8483 if (bitschange < 0)
8484 break;
8485
8486 /* See what's inside this conversion. If we decide to strip it,
8487 we will set WIN. */
8488
8489 if (bitschange > 0)
8490 {
8491 op = TREE_OPERAND (op, 0);
8492 /* An extension: the outermost one can be stripped,
8493 but remember whether it is zero or sign extension. */
8494 if (first)
8495 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8496 /* Otherwise, if a sign extension has been stripped,
8497 only sign extensions can now be stripped;
8498 if a zero extension has been stripped, only zero-extensions. */
8499 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8500 break;
8501 first = 0;
8502 }
8503 else /* bitschange == 0 */
8504 {
8505 /* A change in nominal type can always be stripped, but we must
8506 preserve the unsignedness. */
8507 if (first)
8508 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8509 first = 0;
8510 op = TREE_OPERAND (op, 0);
8511 /* Keep trying to narrow, but don't assign op to win if it
8512 would turn an integral type into something else. */
8513 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8514 continue;
8515 }
8516
8517 win = op;
8518 }
8519
8520 if (TREE_CODE (op) == COMPONENT_REF
8521 /* Since type_for_size always gives an integer type. */
8522 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8523 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8524 /* Ensure field is laid out already. */
8525 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8526 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8527 {
8528 unsigned HOST_WIDE_INT innerprec
8529 = tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1);
8530 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8531 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8532 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8533
8534 /* We can get this structure field in a narrower type that fits it,
8535 but the resulting extension to its nominal type (a fullword type)
8536 must satisfy the same conditions as for other extensions.
8537
8538 Do this only for fields that are aligned (not bit-fields),
8539 because when bit-field insns will be used there is no
8540 advantage in doing this. */
8541
8542 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8543 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8544 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8545 && type != 0)
8546 {
8547 if (first)
8548 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8549 win = fold_convert (type, op);
8550 }
8551 }
8552
8553 *unsignedp_ptr = uns;
8554 return win;
8555 }
8556 \f
8557 /* Returns true if integer constant C has a value that is permissible
8558 for type TYPE (an INTEGER_TYPE). */
8559
8560 bool
8561 int_fits_type_p (const_tree c, const_tree type)
8562 {
8563 tree type_low_bound, type_high_bound;
8564 bool ok_for_low_bound, ok_for_high_bound, unsc;
8565 double_int dc, dd;
8566
8567 dc = tree_to_double_int (c);
8568 unsc = TYPE_UNSIGNED (TREE_TYPE (c));
8569
8570 retry:
8571 type_low_bound = TYPE_MIN_VALUE (type);
8572 type_high_bound = TYPE_MAX_VALUE (type);
8573
8574 /* If at least one bound of the type is a constant integer, we can check
8575 ourselves and maybe make a decision. If no such decision is possible, but
8576 this type is a subtype, try checking against that. Otherwise, use
8577 double_int_fits_to_tree_p, which checks against the precision.
8578
8579 Compute the status for each possibly constant bound, and return if we see
8580 one does not match. Use ok_for_xxx_bound for this purpose: true means the
8581 bound is a constant that is known to be satisfied, false means the bound is
8582 not constant, so whether the constant fits it is unknown. */
8583
8584 /* Check if c >= type_low_bound. */
8585 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8586 {
8587 dd = tree_to_double_int (type_low_bound);
8588 if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_low_bound)))
8589 {
8590 int c_neg = (!unsc && dc.is_negative ());
8591 int t_neg = (unsc && dd.is_negative ());
8592
8593 if (c_neg && !t_neg)
8594 return false;
8595 if ((c_neg || !t_neg) && dc.ult (dd))
8596 return false;
8597 }
8598 else if (dc.cmp (dd, unsc) < 0)
8599 return false;
8600 ok_for_low_bound = true;
8601 }
8602 else
8603 ok_for_low_bound = false;
8604
8605 /* Check if c <= type_high_bound. */
8606 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8607 {
8608 dd = tree_to_double_int (type_high_bound);
8609 if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_high_bound)))
8610 {
8611 int c_neg = (!unsc && dc.is_negative ());
8612 int t_neg = (unsc && dd.is_negative ());
8613
8614 if (t_neg && !c_neg)
8615 return false;
8616 if ((t_neg || !c_neg) && dc.ugt (dd))
8617 return false;
8618 }
8619 else if (dc.cmp (dd, unsc) > 0)
8620 return false;
8621 ok_for_high_bound = true;
8622 }
8623 else
8624 ok_for_high_bound = false;
8625
8626 /* If the constant fits both bounds, the result is known. */
8627 if (ok_for_low_bound && ok_for_high_bound)
8628 return true;
8629
8630 /* Perform some generic filtering which may allow making a decision
8631 even if the bounds are not constant. First, negative integers
8632 never fit in unsigned types. */
8633 if (TYPE_UNSIGNED (type) && !unsc && dc.is_negative ())
8634 return false;
8635
8636 /* Second, narrower types always fit in wider ones. */
8637 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8638 return true;
8639
8640 /* Third, unsigned integers with top bit set never fit signed types. */
8641 if (! TYPE_UNSIGNED (type) && unsc)
8642 {
8643 int prec = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (c))) - 1;
8644 if (prec < HOST_BITS_PER_WIDE_INT)
8645 {
8646 if (((((unsigned HOST_WIDE_INT) 1) << prec) & dc.low) != 0)
8647 return false;
8648 }
8649 else if (((((unsigned HOST_WIDE_INT) 1)
8650 << (prec - HOST_BITS_PER_WIDE_INT)) & dc.high) != 0)
8651 return false;
8652 }
8653
8654 /* If we haven't been able to decide at this point, there is nothing more we
8655 can check ourselves here. Look at the base type if we have one and it
8656 has the same precision. */
8657 if (TREE_CODE (type) == INTEGER_TYPE
8658 && TREE_TYPE (type) != 0
8659 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8660 {
8661 type = TREE_TYPE (type);
8662 goto retry;
8663 }
8664
8665 /* Otherwise, fall back to double_int_fits_to_tree_p. */
8666 return double_int_fits_to_tree_p (type, dc);
8667 }
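
/* Illustrative usage (added sketch, not part of the original file):

       int_fits_type_p (build_int_cst (integer_type_node, 255),
                        unsigned_char_type_node)

   returns true, while the same call with 256 returns false, since the bounds
   of unsigned_char_type_node are the constants 0 and 255 on targets where
   CHAR_TYPE_SIZE is 8.  */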
8668
8669 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8670 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8671 represented (assuming two's-complement arithmetic) within the bit
8672 precision of the type are returned instead. */
8673
8674 void
8675 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8676 {
8677 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8678 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8679 mpz_set_double_int (min, tree_to_double_int (TYPE_MIN_VALUE (type)),
8680 TYPE_UNSIGNED (type));
8681 else
8682 {
8683 if (TYPE_UNSIGNED (type))
8684 mpz_set_ui (min, 0);
8685 else
8686 {
8687 double_int mn;
8688 mn = double_int::mask (TYPE_PRECISION (type) - 1);
8689 mn = (mn + double_int_one).sext (TYPE_PRECISION (type));
8690 mpz_set_double_int (min, mn, false);
8691 }
8692 }
8693
8694 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8695 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8696 mpz_set_double_int (max, tree_to_double_int (TYPE_MAX_VALUE (type)),
8697 TYPE_UNSIGNED (type));
8698 else
8699 {
8700 if (TYPE_UNSIGNED (type))
8701 mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type)),
8702 true);
8703 else
8704 mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type) - 1),
8705 true);
8706 }
8707 }
8708
8709 /* Return true if VAR is an automatic variable defined in function FN. */
8710
8711 bool
8712 auto_var_in_fn_p (const_tree var, const_tree fn)
8713 {
8714 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8715 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8716 || TREE_CODE (var) == PARM_DECL)
8717 && ! TREE_STATIC (var))
8718 || TREE_CODE (var) == LABEL_DECL
8719 || TREE_CODE (var) == RESULT_DECL));
8720 }
8721
8722 /* Subprogram of following function. Called by walk_tree.
8723
8724 Return *TP if it is an automatic variable or parameter of the
8725 function passed in as DATA. */
8726
8727 static tree
8728 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8729 {
8730 tree fn = (tree) data;
8731
8732 if (TYPE_P (*tp))
8733 *walk_subtrees = 0;
8734
8735 else if (DECL_P (*tp)
8736 && auto_var_in_fn_p (*tp, fn))
8737 return *tp;
8738
8739 return NULL_TREE;
8740 }
8741
8742 /* Returns true if T is, contains, or refers to a type with variable
8743 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8744 arguments, but not the return type. If FN is nonzero, only return
8745 true if a modifier of the type or position of FN is a variable or
8746 parameter inside FN.
8747
8748 This concept is more general than that of C99 'variably modified types':
8749 in C99, a struct type is never variably modified because a VLA may not
8750 appear as a structure member. However, in GNU C, code like:
8751
8752 struct S { int i[f()]; };
8753
8754 is valid, and other languages may define similar constructs. */
8755
8756 bool
8757 variably_modified_type_p (tree type, tree fn)
8758 {
8759 tree t;
8760
8761 /* Test if T is either variable (if FN is zero) or an expression containing
8762 a variable in FN. If TYPE isn't gimplified, return true also if
8763 gimplify_one_sizepos would gimplify the expression into a local
8764 variable. */
8765 #define RETURN_TRUE_IF_VAR(T) \
8766 do { tree _t = (T); \
8767 if (_t != NULL_TREE \
8768 && _t != error_mark_node \
8769 && TREE_CODE (_t) != INTEGER_CST \
8770 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8771 && (!fn \
8772 || (!TYPE_SIZES_GIMPLIFIED (type) \
8773 && !is_gimple_sizepos (_t)) \
8774 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8775 return true; } while (0)
8776
8777 if (type == error_mark_node)
8778 return false;
8779
8780 /* If TYPE itself has variable size, it is variably modified. */
8781 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8782 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8783
8784 switch (TREE_CODE (type))
8785 {
8786 case POINTER_TYPE:
8787 case REFERENCE_TYPE:
8788 case VECTOR_TYPE:
8789 if (variably_modified_type_p (TREE_TYPE (type), fn))
8790 return true;
8791 break;
8792
8793 case FUNCTION_TYPE:
8794 case METHOD_TYPE:
8795 /* If TYPE is a function type, it is variably modified if the
8796 return type is variably modified. */
8797 if (variably_modified_type_p (TREE_TYPE (type), fn))
8798 return true;
8799 break;
8800
8801 case INTEGER_TYPE:
8802 case REAL_TYPE:
8803 case FIXED_POINT_TYPE:
8804 case ENUMERAL_TYPE:
8805 case BOOLEAN_TYPE:
8806 /* Scalar types are variably modified if their end points
8807 aren't constant. */
8808 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8809 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8810 break;
8811
8812 case RECORD_TYPE:
8813 case UNION_TYPE:
8814 case QUAL_UNION_TYPE:
8815 /* We can't see if any of the fields are variably-modified by the
8816 definition we normally use, since that would produce infinite
8817 recursion via pointers. */
8818 /* This is variably modified if some field's type is. */
8819 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8820 if (TREE_CODE (t) == FIELD_DECL)
8821 {
8822 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8823 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8824 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8825
8826 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8827 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8828 }
8829 break;
8830
8831 case ARRAY_TYPE:
8832 /* Do not call ourselves to avoid infinite recursion. This is
8833 variably modified if the element type is. */
8834 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8835 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8836 break;
8837
8838 default:
8839 break;
8840 }
8841
8842 /* The current language may have other cases to check, but in general,
8843 all other types are not variably modified. */
8844 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8845
8846 #undef RETURN_TRUE_IF_VAR
8847 }
8848
8849 /* Given a DECL or TYPE, return the scope in which it was declared, or
8850 NULL_TREE if there is no containing scope. */
8851
8852 tree
8853 get_containing_scope (const_tree t)
8854 {
8855 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8856 }
8857
8858 /* Return the innermost context enclosing DECL that is
8859 a FUNCTION_DECL, or zero if none. */
8860
8861 tree
8862 decl_function_context (const_tree decl)
8863 {
8864 tree context;
8865
8866 if (TREE_CODE (decl) == ERROR_MARK)
8867 return 0;
8868
8869 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8870 where we look up the function at runtime. Such functions always take
8871 a first argument of type 'pointer to real context'.
8872
8873 C++ should really be fixed to use DECL_CONTEXT for the real context,
8874 and use something else for the "virtual context". */
8875 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8876 context
8877 = TYPE_MAIN_VARIANT
8878 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8879 else
8880 context = DECL_CONTEXT (decl);
8881
8882 while (context && TREE_CODE (context) != FUNCTION_DECL)
8883 {
8884 if (TREE_CODE (context) == BLOCK)
8885 context = BLOCK_SUPERCONTEXT (context);
8886 else
8887 context = get_containing_scope (context);
8888 }
8889
8890 return context;
8891 }
8892
8893 /* Return the innermost context enclosing DECL that is
8894 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8895 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8896
8897 tree
8898 decl_type_context (const_tree decl)
8899 {
8900 tree context = DECL_CONTEXT (decl);
8901
8902 while (context)
8903 switch (TREE_CODE (context))
8904 {
8905 case NAMESPACE_DECL:
8906 case TRANSLATION_UNIT_DECL:
8907 return NULL_TREE;
8908
8909 case RECORD_TYPE:
8910 case UNION_TYPE:
8911 case QUAL_UNION_TYPE:
8912 return context;
8913
8914 case TYPE_DECL:
8915 case FUNCTION_DECL:
8916 context = DECL_CONTEXT (context);
8917 break;
8918
8919 case BLOCK:
8920 context = BLOCK_SUPERCONTEXT (context);
8921 break;
8922
8923 default:
8924 gcc_unreachable ();
8925 }
8926
8927 return NULL_TREE;
8928 }
8929
8930 /* CALL is a CALL_EXPR. Return the declaration for the function
8931 called, or NULL_TREE if the called function cannot be
8932 determined. */
8933
8934 tree
8935 get_callee_fndecl (const_tree call)
8936 {
8937 tree addr;
8938
8939 if (call == error_mark_node)
8940 return error_mark_node;
8941
8942 /* It's invalid to call this function with anything but a
8943 CALL_EXPR. */
8944 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8945
8946 /* The first operand to the CALL is the address of the function
8947 called. */
8948 addr = CALL_EXPR_FN (call);
8949
8950 STRIP_NOPS (addr);
8951
8952 /* If this is a readonly function pointer, extract its initial value. */
8953 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8954 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8955 && DECL_INITIAL (addr))
8956 addr = DECL_INITIAL (addr);
8957
8958 /* If the address is just `&f' for some function `f', then we know
8959 that `f' is being called. */
8960 if (TREE_CODE (addr) == ADDR_EXPR
8961 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8962 return TREE_OPERAND (addr, 0);
8963
8964 /* We couldn't figure out what was being called. */
8965 return NULL_TREE;
8966 }
8967
8968 /* Print debugging information about tree nodes generated during the compile,
8969 and any language-specific information. */
8970
8971 void
8972 dump_tree_statistics (void)
8973 {
8974 if (GATHER_STATISTICS)
8975 {
8976 int i;
8977 int total_nodes, total_bytes;
8978 fprintf (stderr, "Kind Nodes Bytes\n");
8979 fprintf (stderr, "---------------------------------------\n");
8980 total_nodes = total_bytes = 0;
8981 for (i = 0; i < (int) all_kinds; i++)
8982 {
8983 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8984 tree_node_counts[i], tree_node_sizes[i]);
8985 total_nodes += tree_node_counts[i];
8986 total_bytes += tree_node_sizes[i];
8987 }
8988 fprintf (stderr, "---------------------------------------\n");
8989 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8990 fprintf (stderr, "---------------------------------------\n");
8991 fprintf (stderr, "Code Nodes\n");
8992 fprintf (stderr, "----------------------------\n");
8993 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8994 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8995 tree_code_counts[i]);
8996 fprintf (stderr, "----------------------------\n");
8997 ssanames_print_statistics ();
8998 phinodes_print_statistics ();
8999 }
9000 else
9001 fprintf (stderr, "(No per-node statistics)\n");
9002
9003 print_type_hash_statistics ();
9004 print_debug_expr_statistics ();
9005 print_value_expr_statistics ();
9006 lang_hooks.print_statistics ();
9007 }
9008 \f
9009 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9010
9011 /* Generate a crc32 of the most significant BITS bits of VALUE. */
9012
9013 static unsigned
9014 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9015 {
9016 unsigned ix;
9017
9018 for (ix = bits; ix--; value <<= 1)
9019 {
9020 unsigned feedback;
9021
9022 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9023 chksum <<= 1;
9024 chksum ^= feedback;
9025 }
9026 return chksum;
9027 }
9028
9029 /* Generate a crc32 of a 32-bit unsigned. */
9030
9031 unsigned
9032 crc32_unsigned (unsigned chksum, unsigned value)
9033 {
9034 return crc32_unsigned_bits (chksum, value, 32);
9035 }
9036
9037 /* Generate a crc32 of a byte. */
9038
9039 unsigned
9040 crc32_byte (unsigned chksum, char byte)
9041 {
9042 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9043 }
9044
9045 /* Generate a crc32 of a string. */
9046
9047 unsigned
9048 crc32_string (unsigned chksum, const char *string)
9049 {
9050 do
9051 {
9052 chksum = crc32_byte (chksum, *string);
9053 }
9054 while (*string++);
9055 return chksum;
9056 }
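
/* Note (added for exposition, not part of the original file): the routines
   above implement a bitwise, MSB-first CRC-32 with polynomial 0x04c11db7, no
   bit reflection and no final XOR.  crc32_string also feeds the terminating
   NUL byte of STRING into the checksum, because the loop body runs before the
   terminator test.  */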
9057
9058 /* P is a string that will be used in a symbol. Mask out any characters
9059 that are not valid in that context. */
9060
9061 void
9062 clean_symbol_name (char *p)
9063 {
9064 for (; *p; p++)
9065 if (! (ISALNUM (*p)
9066 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9067 || *p == '$'
9068 #endif
9069 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9070 || *p == '.'
9071 #endif
9072 ))
9073 *p = '_';
9074 }
9075
9076 /* Generate a name for a special-purpose function.
9077 The generated name may need to be unique across the whole link.
9078 Changes to this function may also require corresponding changes to
9079 xstrdup_mask_random.
9080 TYPE is some string to identify the purpose of this function to the
9081 linker or collect2; it must start with an uppercase letter,
9082 one of:
9083 I - for constructors
9084 D - for destructors
9085 N - for C++ anonymous namespaces
9086 F - for DWARF unwind frame information. */
9087
9088 tree
9089 get_file_function_name (const char *type)
9090 {
9091 char *buf;
9092 const char *p;
9093 char *q;
9094
9095 /* If we already have a name we know to be unique, just use that. */
9096 if (first_global_object_name)
9097 p = q = ASTRDUP (first_global_object_name);
9098 /* If the target is handling the constructors/destructors, they
9099 will be local to this file and the name is only necessary for
9100 debugging purposes.
9101 We also assign sub_I and sub_D suffixes to constructors called from
9102 the global static constructors. These are always local. */
9103 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9104 || (strncmp (type, "sub_", 4) == 0
9105 && (type[4] == 'I' || type[4] == 'D')))
9106 {
9107 const char *file = main_input_filename;
9108 if (! file)
9109 file = input_filename;
9110 /* Just use the file's basename, because the full pathname
9111 might be quite long. */
9112 p = q = ASTRDUP (lbasename (file));
9113 }
9114 else
9115 {
9116 /* Otherwise, the name must be unique across the entire link.
9117 We don't have anything that we know to be unique to this translation
9118 unit, so use what we do have and throw in some randomness. */
9119 unsigned len;
9120 const char *name = weak_global_object_name;
9121 const char *file = main_input_filename;
9122
9123 if (! name)
9124 name = "";
9125 if (! file)
9126 file = input_filename;
9127
9128 len = strlen (file);
9129 q = (char *) alloca (9 + 17 + len + 1);
9130 memcpy (q, file, len + 1);
9131
9132 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9133 crc32_string (0, name), get_random_seed (false));
9134
9135 p = q;
9136 }
9137
9138 clean_symbol_name (q);
9139 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9140 + strlen (type));
9141
9142 /* Set up the name of the file-level functions we may need.
9143 Use a global object (which is already required to be unique over
9144 the program) rather than the file name (which imposes extra
9145 constraints). */
9146 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9147
9148 return get_identifier (buf);
9149 }
9150 \f
9151 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9152
9153 /* Complain that the tree code of NODE does not match the expected 0
9154 terminated list of trailing codes. The trailing code list can be
9155 empty, for a more vague error message. FILE, LINE, and FUNCTION
9156 are of the caller. */
9157
9158 void
9159 tree_check_failed (const_tree node, const char *file,
9160 int line, const char *function, ...)
9161 {
9162 va_list args;
9163 const char *buffer;
9164 unsigned length = 0;
9165 enum tree_code code;
9166
9167 va_start (args, function);
9168 while ((code = (enum tree_code) va_arg (args, int)))
9169 length += 4 + strlen (get_tree_code_name (code));
9170 va_end (args);
9171 if (length)
9172 {
9173 char *tmp;
9174 va_start (args, function);
9175 length += strlen ("expected ");
9176 buffer = tmp = (char *) alloca (length);
9177 length = 0;
9178 while ((code = (enum tree_code) va_arg (args, int)))
9179 {
9180 const char *prefix = length ? " or " : "expected ";
9181
9182 strcpy (tmp + length, prefix);
9183 length += strlen (prefix);
9184 strcpy (tmp + length, get_tree_code_name (code));
9185 length += strlen (get_tree_code_name (code));
9186 }
9187 va_end (args);
9188 }
9189 else
9190 buffer = "unexpected node";
9191
9192 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9193 buffer, get_tree_code_name (TREE_CODE (node)),
9194 function, trim_filename (file), line);
9195 }
9196
9197 /* Complain that the tree code of NODE does match the expected 0
9198 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9199 the caller. */
9200
9201 void
9202 tree_not_check_failed (const_tree node, const char *file,
9203 int line, const char *function, ...)
9204 {
9205 va_list args;
9206 char *buffer;
9207 unsigned length = 0;
9208 enum tree_code code;
9209
9210 va_start (args, function);
9211 while ((code = (enum tree_code) va_arg (args, int)))
9212 length += 4 + strlen (get_tree_code_name (code));
9213 va_end (args);
9214 va_start (args, function);
9215 buffer = (char *) alloca (length);
9216 length = 0;
9217 while ((code = (enum tree_code) va_arg (args, int)))
9218 {
9219 if (length)
9220 {
9221 strcpy (buffer + length, " or ");
9222 length += 4;
9223 }
9224 strcpy (buffer + length, get_tree_code_name (code));
9225 length += strlen (get_tree_code_name (code));
9226 }
9227 va_end (args);
9228
9229 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9230 buffer, get_tree_code_name (TREE_CODE (node)),
9231 function, trim_filename (file), line);
9232 }
9233
9234 /* Similar to tree_check_failed, except that we check for a class of tree
9235 code, given in CL. */
9236
9237 void
9238 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9239 const char *file, int line, const char *function)
9240 {
9241 internal_error
9242 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9243 TREE_CODE_CLASS_STRING (cl),
9244 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9245 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9246 }
9247
9248 /* Similar to tree_check_failed, except that instead of specifying a
9249 dozen codes, use the knowledge that they're all sequential. */
9250
9251 void
9252 tree_range_check_failed (const_tree node, const char *file, int line,
9253 const char *function, enum tree_code c1,
9254 enum tree_code c2)
9255 {
9256 char *buffer;
9257 unsigned length = 0;
9258 unsigned int c;
9259
9260 for (c = c1; c <= c2; ++c)
9261 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9262
9263 length += strlen ("expected ");
9264 buffer = (char *) alloca (length);
9265 length = 0;
9266
9267 for (c = c1; c <= c2; ++c)
9268 {
9269 const char *prefix = length ? " or " : "expected ";
9270
9271 strcpy (buffer + length, prefix);
9272 length += strlen (prefix);
9273 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9274 length += strlen (get_tree_code_name ((enum tree_code) c));
9275 }
9276
9277 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9278 buffer, get_tree_code_name (TREE_CODE (node)),
9279 function, trim_filename (file), line);
9280 }
9281
9282
9283 /* Similar to tree_check_failed, except that we check that a tree does
9284 not belong to the class given in CL. */
9285
9286 void
9287 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9288 const char *file, int line, const char *function)
9289 {
9290 internal_error
9291 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9292 TREE_CODE_CLASS_STRING (cl),
9293 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9294 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9295 }
9296
9297
9298 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9299
9300 void
9301 omp_clause_check_failed (const_tree node, const char *file, int line,
9302 const char *function, enum omp_clause_code code)
9303 {
9304 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9305 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9306 function, trim_filename (file), line);
9307 }
9308
9309
9310 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9311
9312 void
9313 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9314 const char *function, enum omp_clause_code c1,
9315 enum omp_clause_code c2)
9316 {
9317 char *buffer;
9318 unsigned length = 0;
9319 unsigned int c;
9320
9321 for (c = c1; c <= c2; ++c)
9322 length += 4 + strlen (omp_clause_code_name[c]);
9323
9324 length += strlen ("expected ");
9325 buffer = (char *) alloca (length);
9326 length = 0;
9327
9328 for (c = c1; c <= c2; ++c)
9329 {
9330 const char *prefix = length ? " or " : "expected ";
9331
9332 strcpy (buffer + length, prefix);
9333 length += strlen (prefix);
9334 strcpy (buffer + length, omp_clause_code_name[c]);
9335 length += strlen (omp_clause_code_name[c]);
9336 }
9337
9338 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9339 buffer, omp_clause_code_name[TREE_CODE (node)],
9340 function, trim_filename (file), line);
9341 }
9342
9343
9344 #undef DEFTREESTRUCT
9345 #define DEFTREESTRUCT(VAL, NAME) NAME,
9346
9347 static const char *ts_enum_names[] = {
9348 #include "treestruct.def"
9349 };
9350 #undef DEFTREESTRUCT
9351
9352 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9353
9354 /* Similar to tree_class_check_failed, except that we check for
9355 whether CODE contains the tree structure identified by EN. */
9356
9357 void
9358 tree_contains_struct_check_failed (const_tree node,
9359 const enum tree_node_structure_enum en,
9360 const char *file, int line,
9361 const char *function)
9362 {
9363 internal_error
9364 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9365 TS_ENUM_NAME (en),
9366 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9367 }
9368
9369
9370 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9371 (dynamically sized) vector. */
9372
9373 void
9374 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9375 const char *function)
9376 {
9377 internal_error
9378 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9379 idx + 1, len, function, trim_filename (file), line);
9380 }
9381
9382 /* Similar to above, except that the check is for the bounds of the operand
9383 vector of an expression node EXP. */
9384
9385 void
9386 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9387 int line, const char *function)
9388 {
9389 enum tree_code code = TREE_CODE (exp);
9390 internal_error
9391 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9392 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9393 function, trim_filename (file), line);
9394 }
9395
9396 /* Similar to above, except that the check is for the number of
9397 operands of an OMP_CLAUSE node. */
9398
9399 void
9400 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9401 int line, const char *function)
9402 {
9403 internal_error
9404 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9405 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9406 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9407 trim_filename (file), line);
9408 }
9409 #endif /* ENABLE_TREE_CHECKING */
9410 \f
9411 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9412 and mapped to the machine mode MODE. Initialize its fields and build
9413 the information necessary for debugging output. */
9414
9415 static tree
9416 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9417 {
9418 tree t;
9419 hashval_t hashcode = 0;
9420
9421 t = make_node (VECTOR_TYPE);
9422 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9423 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9424 SET_TYPE_MODE (t, mode);
9425
9426 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9427 SET_TYPE_STRUCTURAL_EQUALITY (t);
9428 else if (TYPE_CANONICAL (innertype) != innertype
9429 || mode != VOIDmode)
9430 TYPE_CANONICAL (t)
9431 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9432
9433 layout_type (t);
9434
9435 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9436 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9437 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9438 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9439 t = type_hash_canon (hashcode, t);
9440
9441 /* We have built a main variant, based on the main variant of the
9442 inner type. Use it to build the variant we return. */
9443 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9444 && TREE_TYPE (t) != innertype)
9445 return build_type_attribute_qual_variant (t,
9446 TYPE_ATTRIBUTES (innertype),
9447 TYPE_QUALS (innertype));
9448
9449 return t;
9450 }
9451
9452 static tree
9453 make_or_reuse_type (unsigned size, int unsignedp)
9454 {
9455 if (size == INT_TYPE_SIZE)
9456 return unsignedp ? unsigned_type_node : integer_type_node;
9457 if (size == CHAR_TYPE_SIZE)
9458 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9459 if (size == SHORT_TYPE_SIZE)
9460 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9461 if (size == LONG_TYPE_SIZE)
9462 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9463 if (size == LONG_LONG_TYPE_SIZE)
9464 return (unsignedp ? long_long_unsigned_type_node
9465 : long_long_integer_type_node);
9466 if (size == 128 && int128_integer_type_node)
9467 return (unsignedp ? int128_unsigned_type_node
9468 : int128_integer_type_node);
9469
9470 if (unsignedp)
9471 return make_unsigned_type (size);
9472 else
9473 return make_signed_type (size);
9474 }
9475
9476 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9477
9478 static tree
9479 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9480 {
9481 if (satp)
9482 {
9483 if (size == SHORT_FRACT_TYPE_SIZE)
9484 return unsignedp ? sat_unsigned_short_fract_type_node
9485 : sat_short_fract_type_node;
9486 if (size == FRACT_TYPE_SIZE)
9487 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9488 if (size == LONG_FRACT_TYPE_SIZE)
9489 return unsignedp ? sat_unsigned_long_fract_type_node
9490 : sat_long_fract_type_node;
9491 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9492 return unsignedp ? sat_unsigned_long_long_fract_type_node
9493 : sat_long_long_fract_type_node;
9494 }
9495 else
9496 {
9497 if (size == SHORT_FRACT_TYPE_SIZE)
9498 return unsignedp ? unsigned_short_fract_type_node
9499 : short_fract_type_node;
9500 if (size == FRACT_TYPE_SIZE)
9501 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9502 if (size == LONG_FRACT_TYPE_SIZE)
9503 return unsignedp ? unsigned_long_fract_type_node
9504 : long_fract_type_node;
9505 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9506 return unsignedp ? unsigned_long_long_fract_type_node
9507 : long_long_fract_type_node;
9508 }
9509
9510 return make_fract_type (size, unsignedp, satp);
9511 }
9512
9513 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9514
9515 static tree
9516 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9517 {
9518 if (satp)
9519 {
9520 if (size == SHORT_ACCUM_TYPE_SIZE)
9521 return unsignedp ? sat_unsigned_short_accum_type_node
9522 : sat_short_accum_type_node;
9523 if (size == ACCUM_TYPE_SIZE)
9524 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9525 if (size == LONG_ACCUM_TYPE_SIZE)
9526 return unsignedp ? sat_unsigned_long_accum_type_node
9527 : sat_long_accum_type_node;
9528 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9529 return unsignedp ? sat_unsigned_long_long_accum_type_node
9530 : sat_long_long_accum_type_node;
9531 }
9532 else
9533 {
9534 if (size == SHORT_ACCUM_TYPE_SIZE)
9535 return unsignedp ? unsigned_short_accum_type_node
9536 : short_accum_type_node;
9537 if (size == ACCUM_TYPE_SIZE)
9538 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9539 if (size == LONG_ACCUM_TYPE_SIZE)
9540 return unsignedp ? unsigned_long_accum_type_node
9541 : long_accum_type_node;
9542 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9543 return unsignedp ? unsigned_long_long_accum_type_node
9544 : long_long_accum_type_node;
9545 }
9546
9547 return make_accum_type (size, unsignedp, satp);
9548 }
9549
9550
9551 /* Create an atomic variant node for TYPE. This routine is called
9552 during initialization of data types to create the 5 basic atomic
9553 types. The generic build_variant_type function requires these to
9554 already be set up in order to function properly, so it cannot be
9555 called from there. */
9556
9557 static tree
9558 build_atomic_base (tree type)
9559 {
9560 tree t;
9561
9562 /* Make sure it's not already registered. */
9563 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9564 return t;
9565
9566 t = build_variant_type_copy (type);
9567 set_type_quals (t, TYPE_QUAL_ATOMIC);
9568
9569 return t;
9570 }
9571
9572 /* Create nodes for all integer types (and error_mark_node) using the sizes
9573 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9574 SHORT_DOUBLE specifies whether double should be of the same precision
9575 as float. */
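/* Illustrative only: a front end typically invokes this very early in its
   type initialization, passing its own command-line settings; the C family
   roughly does

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   The flag names above are the C family's; other front ends pass whatever
   signedness and precision choices they make themselves.  */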
9576
9577 void
9578 build_common_tree_nodes (bool signed_char, bool short_double)
9579 {
9580 error_mark_node = make_node (ERROR_MARK);
9581 TREE_TYPE (error_mark_node) = error_mark_node;
9582
9583 initialize_sizetypes ();
9584
9585 /* Define both `signed char' and `unsigned char'. */
9586 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9587 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9588 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9589 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9590
9591 /* Define `char', which is like either `signed char' or `unsigned char'
9592 but not the same as either. */
9593 char_type_node
9594 = (signed_char
9595 ? make_signed_type (CHAR_TYPE_SIZE)
9596 : make_unsigned_type (CHAR_TYPE_SIZE));
9597 TYPE_STRING_FLAG (char_type_node) = 1;
9598
9599 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9600 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9601 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9602 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9603 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9604 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9605 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9606 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9607 #if HOST_BITS_PER_WIDE_INT >= 64
9608 /* TODO: This isn't correct, but the logic currently depends on the
9609 host's wide integers rather than the target's.
9610 If there is a target that does not support TImode but has a 128-bit
9611 integer-scalar register, this target check needs to be adjusted. */
9612 if (targetm.scalar_mode_supported_p (TImode))
9613 {
9614 int128_integer_type_node = make_signed_type (128);
9615 int128_unsigned_type_node = make_unsigned_type (128);
9616 }
9617 #endif
9618
9619 /* Define a boolean type. This type only represents boolean values but
9620 may be larger than char depending on the value of BOOL_TYPE_SIZE.
9621 Front ends that want to override this size (e.g. Java) can redefine
9622 boolean_type_node before calling build_common_tree_nodes_2. */
9623 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9624 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9625 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9626 TYPE_PRECISION (boolean_type_node) = 1;
9627
9628 /* Define what type to use for size_t. */
9629 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9630 size_type_node = unsigned_type_node;
9631 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9632 size_type_node = long_unsigned_type_node;
9633 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9634 size_type_node = long_long_unsigned_type_node;
9635 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9636 size_type_node = short_unsigned_type_node;
9637 else
9638 gcc_unreachable ();
9639
9640 /* Fill in the rest of the sized types. Reuse existing type nodes
9641 when possible. */
9642 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9643 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9644 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9645 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9646 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9647
9648 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9649 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9650 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9651 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9652 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9653
9654 /* Don't call build_qualified_type for atomics. That routine does
9655 special processing for atomics, and until they are initialized
9656 it's better not to make that call. */
9657
9658 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node);
9659 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node);
9660 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node);
9661 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node);
9662 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node);
9663
9664 access_public_node = get_identifier ("public");
9665 access_protected_node = get_identifier ("protected");
9666 access_private_node = get_identifier ("private");
9667
9668 /* Define these next since types below may use them. */
9669 integer_zero_node = build_int_cst (integer_type_node, 0);
9670 integer_one_node = build_int_cst (integer_type_node, 1);
9671 integer_three_node = build_int_cst (integer_type_node, 3);
9672 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9673
9674 size_zero_node = size_int (0);
9675 size_one_node = size_int (1);
9676 bitsize_zero_node = bitsize_int (0);
9677 bitsize_one_node = bitsize_int (1);
9678 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9679
9680 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9681 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9682
9683 void_type_node = make_node (VOID_TYPE);
9684 layout_type (void_type_node);
9685
9686 pointer_bounds_type_node = targetm.chkp_bound_type ();
9687
9688 /* We are not going to have real types in C with less than byte alignment,
9689 so we might as well not have any types that claim to have it. */
9690 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9691 TYPE_USER_ALIGN (void_type_node) = 0;
9692
9693 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9694 layout_type (TREE_TYPE (null_pointer_node));
9695
9696 ptr_type_node = build_pointer_type (void_type_node);
9697 const_ptr_type_node
9698 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9699 fileptr_type_node = ptr_type_node;
9700
9701 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9702
9703 float_type_node = make_node (REAL_TYPE);
9704 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9705 layout_type (float_type_node);
9706
9707 double_type_node = make_node (REAL_TYPE);
9708 if (short_double)
9709 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9710 else
9711 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9712 layout_type (double_type_node);
9713
9714 long_double_type_node = make_node (REAL_TYPE);
9715 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9716 layout_type (long_double_type_node);
9717
9718 float_ptr_type_node = build_pointer_type (float_type_node);
9719 double_ptr_type_node = build_pointer_type (double_type_node);
9720 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9721 integer_ptr_type_node = build_pointer_type (integer_type_node);
9722
9723 /* Fixed size integer types. */
9724 uint16_type_node = build_nonstandard_integer_type (16, true);
9725 uint32_type_node = build_nonstandard_integer_type (32, true);
9726 uint64_type_node = build_nonstandard_integer_type (64, true);
9727
9728 /* Decimal float types. */
9729 dfloat32_type_node = make_node (REAL_TYPE);
9730 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9731 layout_type (dfloat32_type_node);
9732 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9733 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9734
9735 dfloat64_type_node = make_node (REAL_TYPE);
9736 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9737 layout_type (dfloat64_type_node);
9738 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9739 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9740
9741 dfloat128_type_node = make_node (REAL_TYPE);
9742 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9743 layout_type (dfloat128_type_node);
9744 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9745 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9746
9747 complex_integer_type_node = build_complex_type (integer_type_node);
9748 complex_float_type_node = build_complex_type (float_type_node);
9749 complex_double_type_node = build_complex_type (double_type_node);
9750 complex_long_double_type_node = build_complex_type (long_double_type_node);
9751
9752 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9753 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9754 sat_ ## KIND ## _type_node = \
9755 make_sat_signed_ ## KIND ## _type (SIZE); \
9756 sat_unsigned_ ## KIND ## _type_node = \
9757 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9758 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9759 unsigned_ ## KIND ## _type_node = \
9760 make_unsigned_ ## KIND ## _type (SIZE);
9761
9762 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9763 sat_ ## WIDTH ## KIND ## _type_node = \
9764 make_sat_signed_ ## KIND ## _type (SIZE); \
9765 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9766 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9767 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9768 unsigned_ ## WIDTH ## KIND ## _type_node = \
9769 make_unsigned_ ## KIND ## _type (SIZE);
9770
9771 /* Make fixed-point type nodes based on four different widths. */
9772 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9773 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9774 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9775 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9776 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9777
9778 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9779 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9780 NAME ## _type_node = \
9781 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9782 u ## NAME ## _type_node = \
9783 make_or_reuse_unsigned_ ## KIND ## _type \
9784 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9785 sat_ ## NAME ## _type_node = \
9786 make_or_reuse_sat_signed_ ## KIND ## _type \
9787 (GET_MODE_BITSIZE (MODE ## mode)); \
9788 sat_u ## NAME ## _type_node = \
9789 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9790 (GET_MODE_BITSIZE (U ## MODE ## mode));
9791
9792 /* Fixed-point type and mode nodes. */
9793 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9794 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9795 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9796 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9797 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9798 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9799 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9800 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9801 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9802 MAKE_FIXED_MODE_NODE (accum, da, DA)
9803 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9804
9805 {
9806 tree t = targetm.build_builtin_va_list ();
9807
9808 /* Many back-ends define record types without setting TYPE_NAME.
9809 If we copied the record type here, we'd keep the original
9810 record type without a name. This breaks name mangling. So,
9811 don't copy record types and let c_common_nodes_and_builtins()
9812 declare the type to be __builtin_va_list. */
9813 if (TREE_CODE (t) != RECORD_TYPE)
9814 t = build_variant_type_copy (t);
9815
9816 va_list_type_node = t;
9817 }
9818 }
9819
9820 /* Modify DECL for given flags.
9821 TM_PURE attribute is set only on types, so the function will modify
9822 DECL's type when ECF_TM_PURE is used. */
9823
9824 void
9825 set_call_expr_flags (tree decl, int flags)
9826 {
9827 if (flags & ECF_NOTHROW)
9828 TREE_NOTHROW (decl) = 1;
9829 if (flags & ECF_CONST)
9830 TREE_READONLY (decl) = 1;
9831 if (flags & ECF_PURE)
9832 DECL_PURE_P (decl) = 1;
9833 if (flags & ECF_LOOPING_CONST_OR_PURE)
9834 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9835 if (flags & ECF_NOVOPS)
9836 DECL_IS_NOVOPS (decl) = 1;
9837 if (flags & ECF_NORETURN)
9838 TREE_THIS_VOLATILE (decl) = 1;
9839 if (flags & ECF_MALLOC)
9840 DECL_IS_MALLOC (decl) = 1;
9841 if (flags & ECF_RETURNS_TWICE)
9842 DECL_IS_RETURNS_TWICE (decl) = 1;
9843 if (flags & ECF_LEAF)
9844 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9845 NULL, DECL_ATTRIBUTES (decl));
9846 if ((flags & ECF_TM_PURE) && flag_tm)
9847 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9848 /* Looping const or pure is implied by noreturn.
9849 There is currently no way to declare looping const or looping pure alone. */
9850 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9851 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9852 }
9853
9854
9855 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9856
9857 static void
9858 local_define_builtin (const char *name, tree type, enum built_in_function code,
9859 const char *library_name, int ecf_flags)
9860 {
9861 tree decl;
9862
9863 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9864 library_name, NULL_TREE);
9865 set_call_expr_flags (decl, ecf_flags);
9866
9867 set_builtin_decl (code, decl, true);
9868 }
9869
9870 /* Call this function after instantiating all builtins that the language
9871 front end cares about. This will build the rest of the builtins that
9872 are relied upon by the tree optimizers and the middle-end. */
9873
9874 void
9875 build_common_builtin_nodes (void)
9876 {
9877 tree tmp, ftype;
9878 int ecf_flags;
9879
9880 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9881 {
9882 ftype = build_function_type (void_type_node, void_list_node);
9883 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9884 "__builtin_unreachable",
9885 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9886 | ECF_CONST);
9887 }
9888
9889 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9890 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9891 {
9892 ftype = build_function_type_list (ptr_type_node,
9893 ptr_type_node, const_ptr_type_node,
9894 size_type_node, NULL_TREE);
9895
9896 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9897 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9898 "memcpy", ECF_NOTHROW | ECF_LEAF);
9899 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9900 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9901 "memmove", ECF_NOTHROW | ECF_LEAF);
9902 }
9903
9904 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9905 {
9906 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9907 const_ptr_type_node, size_type_node,
9908 NULL_TREE);
9909 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9910 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9911 }
9912
9913 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9914 {
9915 ftype = build_function_type_list (ptr_type_node,
9916 ptr_type_node, integer_type_node,
9917 size_type_node, NULL_TREE);
9918 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9919 "memset", ECF_NOTHROW | ECF_LEAF);
9920 }
9921
9922 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9923 {
9924 ftype = build_function_type_list (ptr_type_node,
9925 size_type_node, NULL_TREE);
9926 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9927 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9928 }
9929
9930 ftype = build_function_type_list (ptr_type_node, size_type_node,
9931 size_type_node, NULL_TREE);
9932 local_define_builtin ("__builtin_alloca_with_align", ftype,
9933 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9934 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9935
9936 /* If we're checking the stack, `alloca' can throw. */
9937 if (flag_stack_check)
9938 {
9939 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9940 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9941 }
9942
9943 ftype = build_function_type_list (void_type_node,
9944 ptr_type_node, ptr_type_node,
9945 ptr_type_node, NULL_TREE);
9946 local_define_builtin ("__builtin_init_trampoline", ftype,
9947 BUILT_IN_INIT_TRAMPOLINE,
9948 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9949 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9950 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9951 "__builtin_init_heap_trampoline",
9952 ECF_NOTHROW | ECF_LEAF);
9953
9954 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9955 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9956 BUILT_IN_ADJUST_TRAMPOLINE,
9957 "__builtin_adjust_trampoline",
9958 ECF_CONST | ECF_NOTHROW);
9959
9960 ftype = build_function_type_list (void_type_node,
9961 ptr_type_node, ptr_type_node, NULL_TREE);
9962 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9963 BUILT_IN_NONLOCAL_GOTO,
9964 "__builtin_nonlocal_goto",
9965 ECF_NORETURN | ECF_NOTHROW);
9966
9967 ftype = build_function_type_list (void_type_node,
9968 ptr_type_node, ptr_type_node, NULL_TREE);
9969 local_define_builtin ("__builtin_setjmp_setup", ftype,
9970 BUILT_IN_SETJMP_SETUP,
9971 "__builtin_setjmp_setup", ECF_NOTHROW);
9972
9973 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9974 local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
9975 BUILT_IN_SETJMP_DISPATCHER,
9976 "__builtin_setjmp_dispatcher",
9977 ECF_PURE | ECF_NOTHROW);
9978
9979 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9981 BUILT_IN_SETJMP_RECEIVER,
9982 "__builtin_setjmp_receiver", ECF_NOTHROW);
9983
9984 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9985 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9986 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9987
9988 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9989 local_define_builtin ("__builtin_stack_restore", ftype,
9990 BUILT_IN_STACK_RESTORE,
9991 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9992
9993 /* If there's a possibility that we might use the ARM EABI, build the
9994 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9995 if (targetm.arm_eabi_unwinder)
9996 {
9997 ftype = build_function_type_list (void_type_node, NULL_TREE);
9998 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9999 BUILT_IN_CXA_END_CLEANUP,
10000 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10001 }
10002
10003 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10004 local_define_builtin ("__builtin_unwind_resume", ftype,
10005 BUILT_IN_UNWIND_RESUME,
10006 ((targetm_common.except_unwind_info (&global_options)
10007 == UI_SJLJ)
10008 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10009 ECF_NORETURN);
10010
10011 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10012 {
10013 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10014 NULL_TREE);
10015 local_define_builtin ("__builtin_return_address", ftype,
10016 BUILT_IN_RETURN_ADDRESS,
10017 "__builtin_return_address",
10018 ECF_NOTHROW);
10019 }
10020
10021 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10022 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10023 {
10024 ftype = build_function_type_list (void_type_node, ptr_type_node,
10025 ptr_type_node, NULL_TREE);
10026 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10027 local_define_builtin ("__cyg_profile_func_enter", ftype,
10028 BUILT_IN_PROFILE_FUNC_ENTER,
10029 "__cyg_profile_func_enter", 0);
10030 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10031 local_define_builtin ("__cyg_profile_func_exit", ftype,
10032 BUILT_IN_PROFILE_FUNC_EXIT,
10033 "__cyg_profile_func_exit", 0);
10034 }
10035
10036 /* The exception object and filter values from the runtime. The argument
10037 must be zero before exception lowering, i.e. from the front end. After
10038 exception lowering, it will be the region number for the exception
10039 landing pad. These functions are PURE instead of CONST to prevent
10040 them from being hoisted past the exception edge that will initialize
10041 their values in the landing pad. */
10042 ftype = build_function_type_list (ptr_type_node,
10043 integer_type_node, NULL_TREE);
10044 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10045 /* Only use TM_PURE if we have TM language support. */
10046 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10047 ecf_flags |= ECF_TM_PURE;
10048 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10049 "__builtin_eh_pointer", ecf_flags);
10050
10051 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10052 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10053 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10054 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10055
10056 ftype = build_function_type_list (void_type_node,
10057 integer_type_node, integer_type_node,
10058 NULL_TREE);
10059 local_define_builtin ("__builtin_eh_copy_values", ftype,
10060 BUILT_IN_EH_COPY_VALUES,
10061 "__builtin_eh_copy_values", ECF_NOTHROW);
10062
10063 /* Complex multiplication and division. These are handled as builtins
10064 rather than optabs because emit_library_call_value doesn't support
10065 complex. Further, we can do slightly better with folding these
10066 beasties if the real and imaginary parts of the arguments are separate. */
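/* For instance, for SCmode (complex float) the loop below registers the
   libgcc routines "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" and
   "__gnu_divsc3" when the target requests the GNU libfunc prefix);
   DCmode yields "__muldc3"/"__divdc3", and so on for each complex
   float mode.  */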
10067 {
10068 int mode;
10069
10070 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10071 {
10072 char mode_name_buf[4], *q;
10073 const char *p;
10074 enum built_in_function mcode, dcode;
10075 tree type, inner_type;
10076 const char *prefix = "__";
10077
10078 if (targetm.libfunc_gnu_prefix)
10079 prefix = "__gnu_";
10080
10081 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10082 if (type == NULL)
10083 continue;
10084 inner_type = TREE_TYPE (type);
10085
10086 ftype = build_function_type_list (type, inner_type, inner_type,
10087 inner_type, inner_type, NULL_TREE);
10088
10089 mcode = ((enum built_in_function)
10090 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10091 dcode = ((enum built_in_function)
10092 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10093
10094 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10095 *q = TOLOWER (*p);
10096 *q = '\0';
10097
10098 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10099 NULL);
10100 local_define_builtin (built_in_names[mcode], ftype, mcode,
10101 built_in_names[mcode],
10102 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10103
10104 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10105 NULL);
10106 local_define_builtin (built_in_names[dcode], ftype, dcode,
10107 built_in_names[dcode],
10108 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10109 }
10110 }
10111 }
10112
10113 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10114 better way.
10115
10116 If we requested a pointer to a vector, build up the pointers that
10117 we stripped off while looking for the inner type. Similarly for
10118 return values from functions.
10119
10120 The argument TYPE is the top of the chain, and BOTTOM is the
10121 new type which we will point to. */
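/* An illustrative case (not taken from any particular caller): if TYPE is
   "int **" and BOTTOM is a vector of four ints, the result is
   "vector-of-four-ints **", with the pointer modes, can-alias-all flags,
   attributes and qualifiers of each level carried over from TYPE.  */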
10122
10123 tree
10124 reconstruct_complex_type (tree type, tree bottom)
10125 {
10126 tree inner, outer;
10127
10128 if (TREE_CODE (type) == POINTER_TYPE)
10129 {
10130 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10131 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10132 TYPE_REF_CAN_ALIAS_ALL (type));
10133 }
10134 else if (TREE_CODE (type) == REFERENCE_TYPE)
10135 {
10136 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10137 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10138 TYPE_REF_CAN_ALIAS_ALL (type));
10139 }
10140 else if (TREE_CODE (type) == ARRAY_TYPE)
10141 {
10142 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10143 outer = build_array_type (inner, TYPE_DOMAIN (type));
10144 }
10145 else if (TREE_CODE (type) == FUNCTION_TYPE)
10146 {
10147 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10148 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10149 }
10150 else if (TREE_CODE (type) == METHOD_TYPE)
10151 {
10152 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10153 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10154 so we must compensate by getting rid of it. */
10155 outer
10156 = build_method_type_directly
10157 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10158 inner,
10159 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10160 }
10161 else if (TREE_CODE (type) == OFFSET_TYPE)
10162 {
10163 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10164 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10165 }
10166 else
10167 return bottom;
10168
10169 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10170 TYPE_QUALS (type));
10171 }
10172
10173 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10174 the inner type. */
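/* A sketch of the MODE_INT case handled below: asking for, say, DImode with
   an 8-bit INNERTYPE yields a vector of 64 / 8 = 8 units, provided the
   division is exact.  Vector modes simply use GET_MODE_NUNITS.  */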
10175 tree
10176 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10177 {
10178 int nunits;
10179
10180 switch (GET_MODE_CLASS (mode))
10181 {
10182 case MODE_VECTOR_INT:
10183 case MODE_VECTOR_FLOAT:
10184 case MODE_VECTOR_FRACT:
10185 case MODE_VECTOR_UFRACT:
10186 case MODE_VECTOR_ACCUM:
10187 case MODE_VECTOR_UACCUM:
10188 nunits = GET_MODE_NUNITS (mode);
10189 break;
10190
10191 case MODE_INT:
10192 /* Check that there are no leftover bits. */
10193 gcc_assert (GET_MODE_BITSIZE (mode)
10194 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10195
10196 nunits = GET_MODE_BITSIZE (mode)
10197 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10198 break;
10199
10200 default:
10201 gcc_unreachable ();
10202 }
10203
10204 return make_vector_type (innertype, nunits, mode);
10205 }
10206
10207 /* Similarly, but takes the inner type and number of units, which must be
10208 a power of two. */
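/* Minimal illustration (a sketch, not compiled here):

     tree v4si = build_vector_type (intSI_type_node, 4);

   builds -- or reuses via the type hash table -- a vector type of four
   SImode integers, leaving the machine mode to be chosen by layout_type.  */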
10209
10210 tree
10211 build_vector_type (tree innertype, int nunits)
10212 {
10213 return make_vector_type (innertype, nunits, VOIDmode);
10214 }
10215
10216 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10217
10218 tree
10219 build_opaque_vector_type (tree innertype, int nunits)
10220 {
10221 tree t = make_vector_type (innertype, nunits, VOIDmode);
10222 tree cand;
10223 /* We always build the non-opaque variant before the opaque one,
10224 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10225 cand = TYPE_NEXT_VARIANT (t);
10226 if (cand
10227 && TYPE_VECTOR_OPAQUE (cand)
10228 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10229 return cand;
10230 /* Otherwise build a variant type and make sure to queue it after
10231 the non-opaque type. */
10232 cand = build_distinct_type_copy (t);
10233 TYPE_VECTOR_OPAQUE (cand) = true;
10234 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10235 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10236 TYPE_NEXT_VARIANT (t) = cand;
10237 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10238 return cand;
10239 }
10240
10241
10242 /* Given an initializer INIT, return TRUE if INIT is zero or some
10243 aggregate of zeros. Otherwise return FALSE. */
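/* For example, integer 0, +0.0, a CONSTRUCTOR whose elements are all zero,
   and the string "\0\0" all count as zero initializers, whereas -0.0 does
   not, since its bit pattern is not all zeros.  */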
10244 bool
10245 initializer_zerop (const_tree init)
10246 {
10247 tree elt;
10248
10249 STRIP_NOPS (init);
10250
10251 switch (TREE_CODE (init))
10252 {
10253 case INTEGER_CST:
10254 return integer_zerop (init);
10255
10256 case REAL_CST:
10257 /* ??? Note that this is not correct for C4X float formats. There,
10258 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10259 negative exponent. */
10260 return real_zerop (init)
10261 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10262
10263 case FIXED_CST:
10264 return fixed_zerop (init);
10265
10266 case COMPLEX_CST:
10267 return integer_zerop (init)
10268 || (real_zerop (init)
10269 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10270 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10271
10272 case VECTOR_CST:
10273 {
10274 unsigned i;
10275 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10276 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10277 return false;
10278 return true;
10279 }
10280
10281 case CONSTRUCTOR:
10282 {
10283 unsigned HOST_WIDE_INT idx;
10284
10285 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10286 if (!initializer_zerop (elt))
10287 return false;
10288 return true;
10289 }
10290
10291 case STRING_CST:
10292 {
10293 int i;
10294
10295 /* We need to loop through all elements to handle cases like
10296 "\0" and "\0foobar". */
10297 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10298 if (TREE_STRING_POINTER (init)[i] != '\0')
10299 return false;
10300
10301 return true;
10302 }
10303
10304 default:
10305 return false;
10306 }
10307 }
10308
10309 /* Check if vector VEC consists entirely of equal elements and that the
10310 number of elements corresponds to the type of VEC.
10311 The function returns the first element of the vector,
10312 or NULL_TREE if the vector is not uniform. */
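/* E.g. the VECTOR_CST { 4, 4, 4, 4 } yields the element 4, while
   { 4, 4, 4, 5 } yields NULL_TREE.  */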
10313 tree
10314 uniform_vector_p (const_tree vec)
10315 {
10316 tree first, t;
10317 unsigned i;
10318
10319 if (vec == NULL_TREE)
10320 return NULL_TREE;
10321
10322 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10323
10324 if (TREE_CODE (vec) == VECTOR_CST)
10325 {
10326 first = VECTOR_CST_ELT (vec, 0);
10327 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10328 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10329 return NULL_TREE;
10330
10331 return first;
10332 }
10333
10334 else if (TREE_CODE (vec) == CONSTRUCTOR)
10335 {
10336 first = error_mark_node;
10337
10338 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10339 {
10340 if (i == 0)
10341 {
10342 first = t;
10343 continue;
10344 }
10345 if (!operand_equal_p (first, t, 0))
10346 return NULL_TREE;
10347 }
10348 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10349 return NULL_TREE;
10350
10351 return first;
10352 }
10353
10354 return NULL_TREE;
10355 }
10356
10357 /* Build an empty statement at location LOC. */
10358
10359 tree
10360 build_empty_stmt (location_t loc)
10361 {
10362 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10363 SET_EXPR_LOCATION (t, loc);
10364 return t;
10365 }
10366
10367
10368 /* Build an OpenMP clause with code CODE. LOC is the location of the
10369 clause. */
10370
10371 tree
10372 build_omp_clause (location_t loc, enum omp_clause_code code)
10373 {
10374 tree t;
10375 int size, length;
10376
10377 length = omp_clause_num_ops[code];
10378 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10379
10380 record_node_allocation_statistics (OMP_CLAUSE, size);
10381
10382 t = ggc_alloc_tree_node (size);
10383 memset (t, 0, size);
10384 TREE_SET_CODE (t, OMP_CLAUSE);
10385 OMP_CLAUSE_SET_CODE (t, code);
10386 OMP_CLAUSE_LOCATION (t) = loc;
10387
10388 return t;
10389 }
10390
10391 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10392 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10393 Except for the CODE and operand count field, other storage for the
10394 object is initialized to zeros. */
10395
10396 tree
10397 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10398 {
10399 tree t;
10400 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10401
10402 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10403 gcc_assert (len >= 1);
10404
10405 record_node_allocation_statistics (code, length);
10406
10407 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10408
10409 TREE_SET_CODE (t, code);
10410
10411 /* Can't use TREE_OPERAND to store the length because if checking is
10412 enabled, it will try to check the length before we store it. :-P */
10413 t->exp.operands[0] = build_int_cst (sizetype, len);
10414
10415 return t;
10416 }
10417
10418 /* Helper function for build_call_* functions; build a CALL_EXPR with
10419 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10420 the argument slots. */
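/* The "+ 3" below reflects the CALL_EXPR operand layout: operand 0 holds the
   operand count, operand 1 the callee FN, operand 2 the static chain, and
   the actual arguments start at operand 3 (see CALL_EXPR_ARG).  */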
10421
10422 static tree
10423 build_call_1 (tree return_type, tree fn, int nargs)
10424 {
10425 tree t;
10426
10427 t = build_vl_exp (CALL_EXPR, nargs + 3);
10428 TREE_TYPE (t) = return_type;
10429 CALL_EXPR_FN (t) = fn;
10430 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10431
10432 return t;
10433 }
10434
10435 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10436 FN and a null static chain slot. NARGS is the number of call arguments
10437 which are specified as "..." arguments. */
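/* An illustrative use (a sketch; FN_ADDR, ARG0 and ARG1 are hypothetical
   trees, with FN_ADDR an ADDR_EXPR of some FUNCTION_DECL):

     tree call = build_call_nary (integer_type_node, fn_addr, 2, arg0, arg1);  */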
10438
10439 tree
10440 build_call_nary (tree return_type, tree fn, int nargs, ...)
10441 {
10442 tree ret;
10443 va_list args;
10444 va_start (args, nargs);
10445 ret = build_call_valist (return_type, fn, nargs, args);
10446 va_end (args);
10447 return ret;
10448 }
10449
10450 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10451 FN and a null static chain slot. NARGS is the number of call arguments
10452 which are specified as a va_list ARGS. */
10453
10454 tree
10455 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10456 {
10457 tree t;
10458 int i;
10459
10460 t = build_call_1 (return_type, fn, nargs);
10461 for (i = 0; i < nargs; i++)
10462 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10463 process_call_operands (t);
10464 return t;
10465 }
10466
10467 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10468 FN and a null static chain slot. NARGS is the number of call arguments
10469 which are specified as a tree array ARGS. */
10470
10471 tree
10472 build_call_array_loc (location_t loc, tree return_type, tree fn,
10473 int nargs, const tree *args)
10474 {
10475 tree t;
10476 int i;
10477
10478 t = build_call_1 (return_type, fn, nargs);
10479 for (i = 0; i < nargs; i++)
10480 CALL_EXPR_ARG (t, i) = args[i];
10481 process_call_operands (t);
10482 SET_EXPR_LOCATION (t, loc);
10483 return t;
10484 }
10485
10486 /* Like build_call_array, but takes a vec. */
10487
10488 tree
10489 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10490 {
10491 tree ret, t;
10492 unsigned int ix;
10493
10494 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10495 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10496 CALL_EXPR_ARG (ret, ix) = t;
10497 process_call_operands (ret);
10498 return ret;
10499 }
10500
10501 /* Return true if T (assumed to be a DECL) must be assigned a memory
10502 location. */
10503
10504 bool
10505 needs_to_live_in_memory (const_tree t)
10506 {
10507 return (TREE_ADDRESSABLE (t)
10508 || is_global_var (t)
10509 || (TREE_CODE (t) == RESULT_DECL
10510 && !DECL_BY_REFERENCE (t)
10511 && aggregate_value_p (t, current_function_decl)));
10512 }
10513
10514 /* Return the value of the constant X, sign-extended. */
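/* E.g. a constant whose type has TYPE_PRECISION 8 and whose low bits are
   0xff comes back as -1, because the value is sign-extended from the type's
   precision regardless of the type's own signedness.  */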
10515
10516 HOST_WIDE_INT
10517 int_cst_value (const_tree x)
10518 {
10519 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10520 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10521
10522 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10523 gcc_assert (TREE_INT_CST_HIGH (x) == 0
10524 || TREE_INT_CST_HIGH (x) == -1);
10525
10526 if (bits < HOST_BITS_PER_WIDE_INT)
10527 {
10528 bool negative = ((val >> (bits - 1)) & 1) != 0;
10529 if (negative)
10530 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10531 else
10532 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10533 }
10534
10535 return val;
10536 }
10537
10538 /* Return the value of the constant X, sign-extended. */
10539
10540 HOST_WIDEST_INT
10541 widest_int_cst_value (const_tree x)
10542 {
10543 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10544 unsigned HOST_WIDEST_INT val = TREE_INT_CST_LOW (x);
10545
10546 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10547 gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
10548 val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_HIGH (x))
10549 << HOST_BITS_PER_WIDE_INT);
10550 #else
10551 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10552 gcc_assert (TREE_INT_CST_HIGH (x) == 0
10553 || TREE_INT_CST_HIGH (x) == -1);
10554 #endif
10555
10556 if (bits < HOST_BITS_PER_WIDEST_INT)
10557 {
10558 bool negative = ((val >> (bits - 1)) & 1) != 0;
10559 if (negative)
10560 val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
10561 else
10562 val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
10563 }
10564
10565 return val;
10566 }
10567
10568 /* If TYPE is an integral or pointer type, return an integer type with
10569 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10570 if TYPE is already an integer type of signedness UNSIGNEDP. */
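/* E.g. (illustrative): with UNSIGNEDP set, long_integer_type_node produces
   an unsigned integer type of LONG_TYPE_SIZE precision, and a pointer type
   produces an unsigned integer type of the pointer's precision; a vector
   type yields a vector of the converted element type with the same number
   of subparts.  */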
10571
10572 tree
10573 signed_or_unsigned_type_for (int unsignedp, tree type)
10574 {
10575 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10576 return type;
10577
10578 if (TREE_CODE (type) == VECTOR_TYPE)
10579 {
10580 tree inner = TREE_TYPE (type);
10581 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10582 if (!inner2)
10583 return NULL_TREE;
10584 if (inner == inner2)
10585 return type;
10586 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10587 }
10588
10589 if (!INTEGRAL_TYPE_P (type)
10590 && !POINTER_TYPE_P (type))
10591 return NULL_TREE;
10592
10593 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10594 }
10595
10596 /* If TYPE is an integral or pointer type, return an integer type with
10597 the same precision which is unsigned, or itself if TYPE is already an
10598 unsigned integer type. */
10599
10600 tree
10601 unsigned_type_for (tree type)
10602 {
10603 return signed_or_unsigned_type_for (1, type);
10604 }
10605
10606 /* If TYPE is an integral or pointer type, return an integer type with
10607 the same precision which is signed, or itself if TYPE is already a
10608 signed integer type. */
10609
10610 tree
10611 signed_type_for (tree type)
10612 {
10613 return signed_or_unsigned_type_for (0, type);
10614 }
10615
10616 /* If TYPE is a vector type, return a signed integer vector type with the
10617 same width and number of subparts. Otherwise return boolean_type_node. */
10618
10619 tree
10620 truth_type_for (tree type)
10621 {
10622 if (TREE_CODE (type) == VECTOR_TYPE)
10623 {
10624 tree elem = lang_hooks.types.type_for_size
10625 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10626 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10627 }
10628 else
10629 return boolean_type_node;
10630 }
10631
10632 /* Returns the largest value obtainable by casting something in INNER type to
10633 OUTER type. */
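/* Worked example: INNER a 32-bit signed type, OUTER an 8-bit unsigned type.
   Then oprec (8) <= iprec (32) and OUTER is unsigned, so prec is oprec and
   the bound is 2^8 - 1 == 255 -- the largest value an 8-bit unsigned result
   of such a cast can take.  */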
10634
10635 tree
10636 upper_bound_in_type (tree outer, tree inner)
10637 {
10638 double_int high;
10639 unsigned int det = 0;
10640 unsigned oprec = TYPE_PRECISION (outer);
10641 unsigned iprec = TYPE_PRECISION (inner);
10642 unsigned prec;
10643
10644 /* Compute a unique number for every combination. */
10645 det |= (oprec > iprec) ? 4 : 0;
10646 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10647 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10648
10649 /* Determine the exponent to use. */
10650 switch (det)
10651 {
10652 case 0:
10653 case 1:
10654 /* oprec <= iprec, outer: signed, inner: don't care. */
10655 prec = oprec - 1;
10656 break;
10657 case 2:
10658 case 3:
10659 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10660 prec = oprec;
10661 break;
10662 case 4:
10663 /* oprec > iprec, outer: signed, inner: signed. */
10664 prec = iprec - 1;
10665 break;
10666 case 5:
10667 /* oprec > iprec, outer: signed, inner: unsigned. */
10668 prec = iprec;
10669 break;
10670 case 6:
10671 /* oprec > iprec, outer: unsigned, inner: signed. */
10672 prec = oprec;
10673 break;
10674 case 7:
10675 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10676 prec = iprec;
10677 break;
10678 default:
10679 gcc_unreachable ();
10680 }
10681
10682 /* Compute 2^prec - 1. */
10683 if (prec <= HOST_BITS_PER_WIDE_INT)
10684 {
10685 high.high = 0;
10686 high.low = ((~(unsigned HOST_WIDE_INT) 0)
10687 >> (HOST_BITS_PER_WIDE_INT - prec));
10688 }
10689 else
10690 {
10691 high.high = ((~(unsigned HOST_WIDE_INT) 0)
10692 >> (HOST_BITS_PER_DOUBLE_INT - prec));
10693 high.low = ~(unsigned HOST_WIDE_INT) 0;
10694 }
10695
10696 return double_int_to_tree (outer, high);
10697 }
10698
10699 /* Returns the smallest value obtainable by casting something in INNER type to
10700 OUTER type. */
10701
10702 tree
10703 lower_bound_in_type (tree outer, tree inner)
10704 {
10705 double_int low;
10706 unsigned oprec = TYPE_PRECISION (outer);
10707 unsigned iprec = TYPE_PRECISION (inner);
10708
10709 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10710 and obtain 0. */
10711 if (TYPE_UNSIGNED (outer)
10712 /* If we are widening something of an unsigned type, OUTER type
10713 contains all values of INNER type. In particular, both INNER
10714 and OUTER types have zero in common. */
10715 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10716 low.low = low.high = 0;
10717 else
10718 {
10719 /* If we are widening a signed type to another signed type, we
10720 want to obtain -2^(iprec-1). If we are keeping the
10721 precision or narrowing to a signed type, we want to obtain
10722 -2^(oprec-1). */
10723 unsigned prec = oprec > iprec ? iprec : oprec;
10724
10725 if (prec <= HOST_BITS_PER_WIDE_INT)
10726 {
10727 low.high = ~(unsigned HOST_WIDE_INT) 0;
10728 low.low = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
10729 }
10730 else
10731 {
10732 low.high = ((~(unsigned HOST_WIDE_INT) 0)
10733 << (prec - HOST_BITS_PER_WIDE_INT - 1));
10734 low.low = 0;
10735 }
10736 }
10737
10738 return double_int_to_tree (outer, low);
10739 }
10740
10741 /* Return nonzero if two operands that are suitable for PHI nodes are
10742 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10743 SSA_NAME or invariant. Note that this is strictly an optimization.
10744 That is, callers of this function can directly call operand_equal_p
10745 and get the same result, only slower. */
10746
10747 int
10748 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10749 {
10750 if (arg0 == arg1)
10751 return 1;
10752 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10753 return 0;
10754 return operand_equal_p (arg0, arg1, 0);
10755 }
10756
10757 /* Returns the number of zeros at the end of the binary representation of X.
10758
10759 ??? Use ffs if available? */
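/* E.g. for X == 40 (binary 101000) the result is 3; for X == 0 the count
   saturates and is clamped to TYPE_PRECISION of X's type.  */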
10760
10761 tree
10762 num_ending_zeros (const_tree x)
10763 {
10764 unsigned HOST_WIDE_INT fr, nfr;
10765 unsigned num, abits;
10766 tree type = TREE_TYPE (x);
10767
10768 if (TREE_INT_CST_LOW (x) == 0)
10769 {
10770 num = HOST_BITS_PER_WIDE_INT;
10771 fr = TREE_INT_CST_HIGH (x);
10772 }
10773 else
10774 {
10775 num = 0;
10776 fr = TREE_INT_CST_LOW (x);
10777 }
10778
10779 for (abits = HOST_BITS_PER_WIDE_INT / 2; abits; abits /= 2)
10780 {
10781 nfr = fr >> abits;
10782 if (nfr << abits == fr)
10783 {
10784 num += abits;
10785 fr = nfr;
10786 }
10787 }
10788
10789 if (num > TYPE_PRECISION (type))
10790 num = TYPE_PRECISION (type);
10791
10792 return build_int_cst_type (type, num);
10793 }
10794
10795
10796 #define WALK_SUBTREE(NODE) \
10797 do \
10798 { \
10799 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10800 if (result) \
10801 return result; \
10802 } \
10803 while (0)
10804
10805 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
10806 be walked whenever a type is seen in the tree. The rest of the operands and
10807 the return value are as for walk_tree. */
10808
10809 static tree
10810 walk_type_fields (tree type, walk_tree_fn func, void *data,
10811 struct pointer_set_t *pset, walk_tree_lh lh)
10812 {
10813 tree result = NULL_TREE;
10814
10815 switch (TREE_CODE (type))
10816 {
10817 case POINTER_TYPE:
10818 case REFERENCE_TYPE:
10819 /* We have to worry about mutually recursive pointers. These can't
10820 be written in C. They can in Ada. It's pathological, but
10821 there's an ACATS test (c38102a) that checks it. Deal with this
10822 by checking if we're pointing to another pointer, that one
10823 points to another pointer, that one does too, and we have no htab.
10824 If so, get a hash table. We check three levels deep to avoid
10825 the cost of the hash table if we don't need one. */
10826 if (POINTER_TYPE_P (TREE_TYPE (type))
10827 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10828 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10829 && !pset)
10830 {
10831 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10832 func, data);
10833 if (result)
10834 return result;
10835
10836 break;
10837 }
10838
10839 /* ... fall through ... */
10840
10841 case COMPLEX_TYPE:
10842 WALK_SUBTREE (TREE_TYPE (type));
10843 break;
10844
10845 case METHOD_TYPE:
10846 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10847
10848 /* Fall through. */
10849
10850 case FUNCTION_TYPE:
10851 WALK_SUBTREE (TREE_TYPE (type));
10852 {
10853 tree arg;
10854
10855 /* We never want to walk into default arguments. */
10856 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10857 WALK_SUBTREE (TREE_VALUE (arg));
10858 }
10859 break;
10860
10861 case ARRAY_TYPE:
10862 /* Don't follow this node's type if it is a pointer, for fear that
10863 we'll have infinite recursion. If we have a PSET, then we
10864 need not fear. */
10865 if (pset
10866 || (!POINTER_TYPE_P (TREE_TYPE (type))
10867 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10868 WALK_SUBTREE (TREE_TYPE (type));
10869 WALK_SUBTREE (TYPE_DOMAIN (type));
10870 break;
10871
10872 case OFFSET_TYPE:
10873 WALK_SUBTREE (TREE_TYPE (type));
10874 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10875 break;
10876
10877 default:
10878 break;
10879 }
10880
10881 return NULL_TREE;
10882 }
10883
10884 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10885 called with the DATA and the address of each sub-tree. If FUNC returns a
10886 non-NULL value, the traversal is stopped, and the value returned by FUNC
10887 is returned. If PSET is non-NULL it is used to record the nodes visited,
10888 and to avoid visiting a node more than once. */
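/* An illustrative callback (a sketch, not used anywhere in GCC itself;
   EXPR, N and count_labels_r are hypothetical): count the LABEL_DECLs
   reachable from an expression.  Returning NULL_TREE from the callback
   means "keep walking".

     static tree
     count_labels_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == LABEL_DECL)
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree (&expr, count_labels_r, &n, NULL);

   Passing a pointer set as PSET, as walk_tree_without_duplicates does,
   keeps shared subtrees from being visited more than once.  */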
10889
10890 tree
10891 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10892 struct pointer_set_t *pset, walk_tree_lh lh)
10893 {
10894 enum tree_code code;
10895 int walk_subtrees;
10896 tree result;
10897
10898 #define WALK_SUBTREE_TAIL(NODE) \
10899 do \
10900 { \
10901 tp = & (NODE); \
10902 goto tail_recurse; \
10903 } \
10904 while (0)
10905
10906 tail_recurse:
10907 /* Skip empty subtrees. */
10908 if (!*tp)
10909 return NULL_TREE;
10910
10911 /* Don't walk the same tree twice, if the user has requested
10912 that we avoid doing so. */
10913 if (pset && pointer_set_insert (pset, *tp))
10914 return NULL_TREE;
10915
10916 /* Call the function. */
10917 walk_subtrees = 1;
10918 result = (*func) (tp, &walk_subtrees, data);
10919
10920 /* If we found something, return it. */
10921 if (result)
10922 return result;
10923
10924 code = TREE_CODE (*tp);
10925
10926 /* Even if we didn't, FUNC may have decided that there was nothing
10927 interesting below this point in the tree. */
10928 if (!walk_subtrees)
10929 {
10930 /* But we still need to check our siblings. */
10931 if (code == TREE_LIST)
10932 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10933 else if (code == OMP_CLAUSE)
10934 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10935 else
10936 return NULL_TREE;
10937 }
10938
10939 if (lh)
10940 {
10941 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10942 if (result || !walk_subtrees)
10943 return result;
10944 }
10945
10946 switch (code)
10947 {
10948 case ERROR_MARK:
10949 case IDENTIFIER_NODE:
10950 case INTEGER_CST:
10951 case REAL_CST:
10952 case FIXED_CST:
10953 case VECTOR_CST:
10954 case STRING_CST:
10955 case BLOCK:
10956 case PLACEHOLDER_EXPR:
10957 case SSA_NAME:
10958 case FIELD_DECL:
10959 case RESULT_DECL:
10960 /* None of these have subtrees other than those already walked
10961 above. */
10962 break;
10963
10964 case TREE_LIST:
10965 WALK_SUBTREE (TREE_VALUE (*tp));
10966 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10967 break;
10968
10969 case TREE_VEC:
10970 {
10971 int len = TREE_VEC_LENGTH (*tp);
10972
10973 if (len == 0)
10974 break;
10975
10976 /* Walk all elements but the first. */
10977 while (--len)
10978 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10979
10980 /* Now walk the first one as a tail call. */
10981 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10982 }
10983
10984 case COMPLEX_CST:
10985 WALK_SUBTREE (TREE_REALPART (*tp));
10986 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10987
10988 case CONSTRUCTOR:
10989 {
10990 unsigned HOST_WIDE_INT idx;
10991 constructor_elt *ce;
10992
10993 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10994 idx++)
10995 WALK_SUBTREE (ce->value);
10996 }
10997 break;
10998
10999 case SAVE_EXPR:
11000 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11001
11002 case BIND_EXPR:
11003 {
11004 tree decl;
11005 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11006 {
11007 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11008 into declarations that are just mentioned, rather than
11009 declared; they don't really belong to this part of the tree.
11010 And, we can see cycles: the initializer for a declaration
11011 can refer to the declaration itself. */
11012 WALK_SUBTREE (DECL_INITIAL (decl));
11013 WALK_SUBTREE (DECL_SIZE (decl));
11014 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11015 }
11016 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11017 }
11018
11019 case STATEMENT_LIST:
11020 {
11021 tree_stmt_iterator i;
11022 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11023 WALK_SUBTREE (*tsi_stmt_ptr (i));
11024 }
11025 break;
11026
11027 case OMP_CLAUSE:
11028 switch (OMP_CLAUSE_CODE (*tp))
11029 {
11030 case OMP_CLAUSE_PRIVATE:
11031 case OMP_CLAUSE_SHARED:
11032 case OMP_CLAUSE_FIRSTPRIVATE:
11033 case OMP_CLAUSE_COPYIN:
11034 case OMP_CLAUSE_COPYPRIVATE:
11035 case OMP_CLAUSE_FINAL:
11036 case OMP_CLAUSE_IF:
11037 case OMP_CLAUSE_NUM_THREADS:
11038 case OMP_CLAUSE_SCHEDULE:
11039 case OMP_CLAUSE_UNIFORM:
11040 case OMP_CLAUSE_DEPEND:
11041 case OMP_CLAUSE_NUM_TEAMS:
11042 case OMP_CLAUSE_THREAD_LIMIT:
11043 case OMP_CLAUSE_DEVICE:
11044 case OMP_CLAUSE_DIST_SCHEDULE:
11045 case OMP_CLAUSE_SAFELEN:
11046 case OMP_CLAUSE_SIMDLEN:
11047 case OMP_CLAUSE__LOOPTEMP_:
11048 case OMP_CLAUSE__SIMDUID_:
11049 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11050 /* FALLTHRU */
11051
11052 case OMP_CLAUSE_NOWAIT:
11053 case OMP_CLAUSE_ORDERED:
11054 case OMP_CLAUSE_DEFAULT:
11055 case OMP_CLAUSE_UNTIED:
11056 case OMP_CLAUSE_MERGEABLE:
11057 case OMP_CLAUSE_PROC_BIND:
11058 case OMP_CLAUSE_INBRANCH:
11059 case OMP_CLAUSE_NOTINBRANCH:
11060 case OMP_CLAUSE_FOR:
11061 case OMP_CLAUSE_PARALLEL:
11062 case OMP_CLAUSE_SECTIONS:
11063 case OMP_CLAUSE_TASKGROUP:
11064 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11065
11066 case OMP_CLAUSE_LASTPRIVATE:
11067 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11068 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11069 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11070
11071 case OMP_CLAUSE_COLLAPSE:
11072 {
11073 int i;
11074 for (i = 0; i < 3; i++)
11075 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11076 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11077 }
11078
11079 case OMP_CLAUSE_ALIGNED:
11080 case OMP_CLAUSE_LINEAR:
11081 case OMP_CLAUSE_FROM:
11082 case OMP_CLAUSE_TO:
11083 case OMP_CLAUSE_MAP:
11084 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11085 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11086 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11087
11088 case OMP_CLAUSE_REDUCTION:
11089 {
11090 int i;
11091 for (i = 0; i < 4; i++)
11092 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11093 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11094 }
11095
11096 default:
11097 gcc_unreachable ();
11098 }
11099 break;
11100
11101 case TARGET_EXPR:
11102 {
11103 int i, len;
11104
11105 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11106 But, we only want to walk once. */
11107 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11108 for (i = 0; i < len; ++i)
11109 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11110 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11111 }
11112
11113 case DECL_EXPR:
11114 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11115 defining. We only want to walk into these fields of a type in this
11116 case and not in the general case of a mere reference to the type.
11117
11118 The criterion is as follows: if the field can be an expression, it
11119 must be walked only here. This should be in keeping with the fields
11120 that are directly gimplified in gimplify_type_sizes in order for the
11121 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11122 variable-sized types.
11123
11124 Note that DECLs get walked as part of processing the BIND_EXPR. */
11125 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11126 {
11127 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11128 if (TREE_CODE (*type_p) == ERROR_MARK)
11129 return NULL_TREE;
11130
11131 /* Call the function for the type. See if it returns anything or
11132 doesn't want us to continue. If we are to continue, walk both
11133 the normal fields and those for the declaration case. */
11134 result = (*func) (type_p, &walk_subtrees, data);
11135 if (result || !walk_subtrees)
11136 return result;
11137
11138 /* But do not walk a pointed-to type since it may itself need to
11139 be walked in the declaration case if it isn't anonymous. */
11140 if (!POINTER_TYPE_P (*type_p))
11141 {
11142 result = walk_type_fields (*type_p, func, data, pset, lh);
11143 if (result)
11144 return result;
11145 }
11146
11147 /* If this is a record type, also walk the fields. */
11148 if (RECORD_OR_UNION_TYPE_P (*type_p))
11149 {
11150 tree field;
11151
11152 for (field = TYPE_FIELDS (*type_p); field;
11153 field = DECL_CHAIN (field))
11154 {
11155 /* We'd like to look at the type of the field, but we can
11156 easily get infinite recursion. So assume it's pointed
11157 to elsewhere in the tree. Also, ignore things that
11158 aren't fields. */
11159 if (TREE_CODE (field) != FIELD_DECL)
11160 continue;
11161
11162 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11163 WALK_SUBTREE (DECL_SIZE (field));
11164 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11165 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11166 WALK_SUBTREE (DECL_QUALIFIER (field));
11167 }
11168 }
11169
11170 /* Same for scalar types. */
11171 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11172 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11173 || TREE_CODE (*type_p) == INTEGER_TYPE
11174 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11175 || TREE_CODE (*type_p) == REAL_TYPE)
11176 {
11177 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11178 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11179 }
11180
11181 WALK_SUBTREE (TYPE_SIZE (*type_p));
11182 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11183 }
11184 /* FALLTHRU */
11185
11186 default:
11187 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11188 {
11189 int i, len;
11190
11191 /* Walk over all the sub-trees of this operand. */
11192 len = TREE_OPERAND_LENGTH (*tp);
11193
11194 /* Go through the subtrees. We need to do this in forward order so
11195 that the scope of a FOR_EXPR is handled properly. */
11196 if (len)
11197 {
11198 for (i = 0; i < len - 1; ++i)
11199 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11200 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11201 }
11202 }
11203 /* If this is a type, walk the needed fields in the type. */
11204 else if (TYPE_P (*tp))
11205 return walk_type_fields (*tp, func, data, pset, lh);
11206 break;
11207 }
11208
11209 /* We didn't find what we were looking for. */
11210 return NULL_TREE;
11211
11212 #undef WALK_SUBTREE_TAIL
11213 }
11214 #undef WALK_SUBTREE
11215
11216 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11217
11218 tree
11219 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11220 walk_tree_lh lh)
11221 {
11222 tree result;
11223 struct pointer_set_t *pset;
11224
11225 pset = pointer_set_create ();
11226 result = walk_tree_1 (tp, func, data, pset, lh);
11227 pointer_set_destroy (pset);
11228 return result;
11229 }
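/* Usage sketch (illustrative, not part of the original sources): a typical
   walk_tree_fn callback that counts CALL_EXPR nodes, relying on the pointer
   set above so each shared subtree is visited only once:

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;    (returning non-NULL would stop the walk)
     }

     int n = 0;
     walk_tree_without_duplicates (&body, count_calls_r, &n);

   walk_tree_without_duplicates is assumed to be the usual convenience macro
   that wraps walk_tree_without_duplicates_1 with a NULL language hook.  */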
11230
11231
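/* Return the BLOCK recorded in the location of expression T, if any.
   T must be an expression.  */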
11232 tree
11233 tree_block (tree t)
11234 {
11235 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11236
11237 if (IS_EXPR_CODE_CLASS (c))
11238 return LOCATION_BLOCK (t->exp.locus);
11239 gcc_unreachable ();
11240 return NULL;
11241 }
11242
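/* Record block B in the location of expression T, or drop the block
   information when B is NULL.  T must be an expression.  */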
11243 void
11244 tree_set_block (tree t, tree b)
11245 {
11246 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11247
11248 if (IS_EXPR_CODE_CLASS (c))
11249 {
11250 if (b)
11251 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11252 else
11253 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11254 }
11255 else
11256 gcc_unreachable ();
11257 }
11258
11259 /* Create a nameless artificial label and put it in the current
11260 function context. The label has a location of LOC. Returns the
11261 newly created label. */
11262
11263 tree
11264 create_artificial_label (location_t loc)
11265 {
11266 tree lab = build_decl (loc,
11267 LABEL_DECL, NULL_TREE, void_type_node);
11268
11269 DECL_ARTIFICIAL (lab) = 1;
11270 DECL_IGNORED_P (lab) = 1;
11271 DECL_CONTEXT (lab) = current_function_decl;
11272 return lab;
11273 }
11274
11275 /* Given a tree, try to return a useful variable name that we can use
11276 to prefix a temporary that is being assigned the value of the tree.
11277 I.e. given <temp> = &A, return A. */
11278
11279 const char *
11280 get_name (tree t)
11281 {
11282 tree stripped_decl;
11283
11284 stripped_decl = t;
11285 STRIP_NOPS (stripped_decl);
11286 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11287 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11288 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11289 {
11290 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11291 if (!name)
11292 return NULL;
11293 return IDENTIFIER_POINTER (name);
11294 }
11295 else
11296 {
11297 switch (TREE_CODE (stripped_decl))
11298 {
11299 case ADDR_EXPR:
11300 return get_name (TREE_OPERAND (stripped_decl, 0));
11301 default:
11302 return NULL;
11303 }
11304 }
11305 }
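/* Usage sketch (illustrative, not part of the original sources): a caller
   creating a temporary for the right-hand side of "t = &a" can use the name
   of "a" as a readable prefix:

     const char *prefix = get_name (rhs);    (yields "a" for &a, may be NULL)
     tree tmp = create_tmp_var (TREE_TYPE (rhs), prefix);

   create_tmp_var stands in for whatever temporary-creation helper the caller
   already uses; get_name only supplies the optional prefix.  */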
11306
11307 /* Return true if FNTYPE has a variable argument list. */
11308
11309 bool
11310 stdarg_p (const_tree fntype)
11311 {
11312 function_args_iterator args_iter;
11313 tree n = NULL_TREE, t;
11314
11315 if (!fntype)
11316 return false;
11317
11318 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11319 {
11320 n = t;
11321 }
11322
11323 return n != NULL_TREE && n != void_type_node;
11324 }
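/* For example:

     int f (int, ...);    true, since the argument list does not end in
                          void_type_node;
     int g (int);         false, since the list is terminated by
                          void_type_node;
     int h ();            false, since an unprototyped argument list is
                          empty and N stays NULL_TREE.  */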
11325
11326 /* Return true if FNTYPE has a prototype. */
11327
11328 bool
11329 prototype_p (tree fntype)
11330 {
11331 tree t;
11332
11333 gcc_assert (fntype != NULL_TREE);
11334
11335 t = TYPE_ARG_TYPES (fntype);
11336 return (t != NULL_TREE);
11337 }
11338
11339 /* If BLOCK is inlined from an __attribute__((__artificial__))
11340 routine, return a pointer to the location from which it has been
11341 called. */
11342 location_t *
11343 block_nonartificial_location (tree block)
11344 {
11345 location_t *ret = NULL;
11346
11347 while (block && TREE_CODE (block) == BLOCK
11348 && BLOCK_ABSTRACT_ORIGIN (block))
11349 {
11350 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11351
11352 while (TREE_CODE (ao) == BLOCK
11353 && BLOCK_ABSTRACT_ORIGIN (ao)
11354 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11355 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11356
11357 if (TREE_CODE (ao) == FUNCTION_DECL)
11358 {
11359 /* If AO is an artificial inline, point RET to the
11360 call site locus at which it has been inlined and continue
11361 the loop, in case AO's caller is also an artificial
11362 inline. */
11363 if (DECL_DECLARED_INLINE_P (ao)
11364 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11365 ret = &BLOCK_SOURCE_LOCATION (block);
11366 else
11367 break;
11368 }
11369 else if (TREE_CODE (ao) != BLOCK)
11370 break;
11371
11372 block = BLOCK_SUPERCONTEXT (block);
11373 }
11374 return ret;
11375 }
11376
11377
11378 /* If EXP is inlined from an __attribute__((__artificial__))
11379 function, return the location of the original call expression. */
11380
11381 location_t
11382 tree_nonartificial_location (tree exp)
11383 {
11384 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11385
11386 if (loc)
11387 return *loc;
11388 else
11389 return EXPR_LOCATION (exp);
11390 }
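/* Illustrative example (not from the original sources): these helpers let a
   diagnostic triggered inside an artificial always-inline wrapper such as

     extern void bar (int);
     static inline void
     __attribute__ ((__always_inline__, __artificial__))
     foo (int x) { bar (x); }

   be reported at the location where foo was called instead of inside the
   wrapper's body.  */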
11391
11392
11393 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11394 and TARGET_OPTION_NODE nodes. */
11395
11396 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11397
11398 static hashval_t
11399 cl_option_hash_hash (const void *x)
11400 {
11401 const_tree const t = (const_tree) x;
11402 const char *p;
11403 size_t i;
11404 size_t len = 0;
11405 hashval_t hash = 0;
11406
11407 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11408 {
11409 p = (const char *)TREE_OPTIMIZATION (t);
11410 len = sizeof (struct cl_optimization);
11411 }
11412
11413 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11414 {
11415 p = (const char *)TREE_TARGET_OPTION (t);
11416 len = sizeof (struct cl_target_option);
11417 }
11418
11419 else
11420 gcc_unreachable ();
11421
11422 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11423 something else. */
11424 for (i = 0; i < len; i++)
11425 if (p[i])
11426 hash = (hash << 4) ^ ((i << 2) | p[i]);
11427
11428 return hash;
11429 }
11430
11431 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11432 TARGET_OPTION tree node) is the same as that given by *Y, which is of
11433 the same kind of node. */
11434
11435 static int
11436 cl_option_hash_eq (const void *x, const void *y)
11437 {
11438 const_tree const xt = (const_tree) x;
11439 const_tree const yt = (const_tree) y;
11440 const char *xp;
11441 const char *yp;
11442 size_t len;
11443
11444 if (TREE_CODE (xt) != TREE_CODE (yt))
11445 return 0;
11446
11447 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11448 {
11449 xp = (const char *)TREE_OPTIMIZATION (xt);
11450 yp = (const char *)TREE_OPTIMIZATION (yt);
11451 len = sizeof (struct cl_optimization);
11452 }
11453
11454 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11455 {
11456 xp = (const char *)TREE_TARGET_OPTION (xt);
11457 yp = (const char *)TREE_TARGET_OPTION (yt);
11458 len = sizeof (struct cl_target_option);
11459 }
11460
11461 else
11462 gcc_unreachable ();
11463
11464 return (memcmp (xp, yp, len) == 0);
11465 }
11466
11467 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11468
11469 tree
11470 build_optimization_node (struct gcc_options *opts)
11471 {
11472 tree t;
11473 void **slot;
11474
11475 /* Use the cache of optimization nodes. */
11476
11477 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11478 opts);
11479
11480 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11481 t = (tree) *slot;
11482 if (!t)
11483 {
11484 /* Insert this one into the hash table. */
11485 t = cl_optimization_node;
11486 *slot = t;
11487
11488 /* Make a new node for next time round. */
11489 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11490 }
11491
11492 return t;
11493 }
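/* Both build_optimization_node above and build_target_option_node below use
   the same scheme: a pre-allocated scratch node (cl_optimization_node or
   cl_target_option_node) is filled in from OPTS and used as the hash table
   lookup key.  If an equal node is already interned, it is returned and the
   scratch node is reused on the next call; otherwise the scratch node itself
   becomes the interned node and a fresh scratch node is allocated.  */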
11494
11495 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11496
11497 tree
11498 build_target_option_node (struct gcc_options *opts)
11499 {
11500 tree t;
11501 void **slot;
11502
11503 /* Use the cache of target option nodes. */
11504
11505 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11506 opts);
11507
11508 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11509 t = (tree) *slot;
11510 if (!t)
11511 {
11512 /* Insert this one into the hash table. */
11513 t = cl_target_option_node;
11514 *slot = t;
11515
11516 /* Make a new node for next time round. */
11517 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11518 }
11519
11520 return t;
11521 }
11522
11523 /* Determine the "ultimate origin" of a block. The block may be an inlined
11524 instance of an inlined instance of a block which is local to an inline
11525 function, so we have to trace all of the way back through the origin chain
11526 to find out what sort of node actually served as the original seed for the
11527 given block. */
11528
11529 tree
11530 block_ultimate_origin (const_tree block)
11531 {
11532 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11533
11534 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11535 nodes in the function to point to themselves; ignore that if
11536 we're trying to output the abstract instance of this function. */
11537 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11538 return NULL_TREE;
11539
11540 if (immediate_origin == NULL_TREE)
11541 return NULL_TREE;
11542 else
11543 {
11544 tree ret_val;
11545 tree lookahead = immediate_origin;
11546
11547 do
11548 {
11549 ret_val = lookahead;
11550 lookahead = (TREE_CODE (ret_val) == BLOCK
11551 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11552 }
11553 while (lookahead != NULL && lookahead != ret_val);
11554
11555 /* The block's abstract origin chain may not be the *ultimate* origin of
11556 the block. It could lead to a DECL that has an abstract origin set.
11557 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11558 will give us if it has one). Note that DECL's abstract origins are
11559 supposed to be the most distant ancestor (or so decl_ultimate_origin
11560 claims), so we don't need to loop following the DECL origins. */
11561 if (DECL_P (ret_val))
11562 return DECL_ORIGIN (ret_val);
11563
11564 return ret_val;
11565 }
11566 }
11567
11568 /* Return true iff conversion in EXP generates no instruction. Mark
11569 it inline so that we fully inline into the stripping functions even
11570 though we have two uses of this function. */
11571
11572 static inline bool
11573 tree_nop_conversion (const_tree exp)
11574 {
11575 tree outer_type, inner_type;
11576
11577 if (!CONVERT_EXPR_P (exp)
11578 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11579 return false;
11580 if (TREE_OPERAND (exp, 0) == error_mark_node)
11581 return false;
11582
11583 outer_type = TREE_TYPE (exp);
11584 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11585
11586 if (!inner_type)
11587 return false;
11588
11589 /* Use precision rather than machine mode when we can, which gives
11590 the correct answer even for submode (bit-field) types. */
11591 if ((INTEGRAL_TYPE_P (outer_type)
11592 || POINTER_TYPE_P (outer_type)
11593 || TREE_CODE (outer_type) == OFFSET_TYPE)
11594 && (INTEGRAL_TYPE_P (inner_type)
11595 || POINTER_TYPE_P (inner_type)
11596 || TREE_CODE (inner_type) == OFFSET_TYPE))
11597 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11598
11599 /* Otherwise fall back on comparing machine modes (e.g. for
11600 aggregate types, floats). */
11601 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11602 }
11603
11604 /* Return true iff conversion in EXP generates no instruction. Don't
11605 consider conversions changing the signedness. */
11606
11607 static bool
11608 tree_sign_nop_conversion (const_tree exp)
11609 {
11610 tree outer_type, inner_type;
11611
11612 if (!tree_nop_conversion (exp))
11613 return false;
11614
11615 outer_type = TREE_TYPE (exp);
11616 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11617
11618 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11619 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11620 }
11621
11622 /* Strip conversions from EXP according to tree_nop_conversion and
11623 return the resulting expression. */
11624
11625 tree
11626 tree_strip_nop_conversions (tree exp)
11627 {
11628 while (tree_nop_conversion (exp))
11629 exp = TREE_OPERAND (exp, 0);
11630 return exp;
11631 }
11632
11633 /* Strip conversions from EXP according to tree_sign_nop_conversion
11634 and return the resulting expression. */
11635
11636 tree
11637 tree_strip_sign_nop_conversions (tree exp)
11638 {
11639 while (tree_sign_nop_conversion (exp))
11640 exp = TREE_OPERAND (exp, 0);
11641 return exp;
11642 }
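/* For example, with 32-bit int:

     (unsigned int) i   (i of type int) is stripped by
                        tree_strip_nop_conversions, since the precision is
                        unchanged, but kept by tree_strip_sign_nop_conversions,
                        since the signedness changes;
     (short) i          is kept by both, since the precision changes.  */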
11643
11644 /* Avoid any floating point extensions from EXP. */
11645 tree
11646 strip_float_extensions (tree exp)
11647 {
11648 tree sub, expt, subt;
11649
11650 /* For a floating point constant, look up the narrowest type that can hold
11651 it properly and handle it like (type)(narrowest_type)constant.
11652 This way we can optimize, for instance, a=a*2.0 where "a" is float
11653 but 2.0 is a double constant. */
11654 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11655 {
11656 REAL_VALUE_TYPE orig;
11657 tree type = NULL;
11658
11659 orig = TREE_REAL_CST (exp);
11660 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11661 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11662 type = float_type_node;
11663 else if (TYPE_PRECISION (TREE_TYPE (exp))
11664 > TYPE_PRECISION (double_type_node)
11665 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11666 type = double_type_node;
11667 if (type)
11668 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11669 }
11670
11671 if (!CONVERT_EXPR_P (exp))
11672 return exp;
11673
11674 sub = TREE_OPERAND (exp, 0);
11675 subt = TREE_TYPE (sub);
11676 expt = TREE_TYPE (exp);
11677
11678 if (!FLOAT_TYPE_P (subt))
11679 return exp;
11680
11681 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11682 return exp;
11683
11684 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11685 return exp;
11686
11687 return strip_float_extensions (sub);
11688 }
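/* For example, given "float f" and "double d":

     (double) f    is stripped down to f;
     2.0           (a double constant exactly representable as float) is
                   rewritten as a float constant;
     (float) d     is returned unchanged, since the conversion narrows
                   rather than extends.  */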
11689
11690 /* Strip out all handled components that produce invariant
11691 offsets. */
11692
11693 const_tree
11694 strip_invariant_refs (const_tree op)
11695 {
11696 while (handled_component_p (op))
11697 {
11698 switch (TREE_CODE (op))
11699 {
11700 case ARRAY_REF:
11701 case ARRAY_RANGE_REF:
11702 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11703 || TREE_OPERAND (op, 2) != NULL_TREE
11704 || TREE_OPERAND (op, 3) != NULL_TREE)
11705 return NULL;
11706 break;
11707
11708 case COMPONENT_REF:
11709 if (TREE_OPERAND (op, 2) != NULL_TREE)
11710 return NULL;
11711 break;
11712
11713 default:;
11714 }
11715 op = TREE_OPERAND (op, 0);
11716 }
11717
11718 return op;
11719 }
11720
11721 static GTY(()) tree gcc_eh_personality_decl;
11722
11723 /* Return the GCC personality function decl. */
11724
11725 tree
11726 lhd_gcc_personality (void)
11727 {
11728 if (!gcc_eh_personality_decl)
11729 gcc_eh_personality_decl = build_personality_function ("gcc");
11730 return gcc_eh_personality_decl;
11731 }
11732
11733 /* For languages with a One Definition Rule, work out if
11734 trees are actually the same even if the tree representation
11735 differs. This handles only decls appearing in TYPE_NAME
11736 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11737 RECORD_TYPE and IDENTIFIER_NODE. */
11738
11739 static bool
11740 same_for_odr (tree t1, tree t2)
11741 {
11742 if (t1 == t2)
11743 return true;
11744 if (!t1 || !t2)
11745 return false;
11746 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11747 if (TREE_CODE (t1) == IDENTIFIER_NODE
11748 && TREE_CODE (t2) == TYPE_DECL
11749 && DECL_FILE_SCOPE_P (t1))
11750 {
11751 t2 = DECL_NAME (t2);
11752 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11753 }
11754 if (TREE_CODE (t2) == IDENTIFIER_NODE
11755 && TREE_CODE (t1) == TYPE_DECL
11756 && DECL_FILE_SCOPE_P (t2))
11757 {
11758 t1 = DECL_NAME (t1);
11759 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11760 }
11761 if (TREE_CODE (t1) != TREE_CODE (t2))
11762 return false;
11763 if (TYPE_P (t1))
11764 return types_same_for_odr (t1, t2);
11765 if (DECL_P (t1))
11766 return decls_same_for_odr (t1, t2);
11767 return false;
11768 }
11769
11770 /* For languages with a One Definition Rule, work out if
11771 decls are actually the same even if the tree representation
11772 differs. This handles only decls appearing in TYPE_NAME
11773 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11774 RECORD_TYPE and IDENTIFIER_NODE. */
11775
11776 static bool
11777 decls_same_for_odr (tree decl1, tree decl2)
11778 {
11779 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11780 && DECL_ORIGINAL_TYPE (decl1))
11781 decl1 = DECL_ORIGINAL_TYPE (decl1);
11782 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11783 && DECL_ORIGINAL_TYPE (decl2))
11784 decl2 = DECL_ORIGINAL_TYPE (decl2);
11785 if (decl1 == decl2)
11786 return true;
11787 if (!decl1 || !decl2)
11788 return false;
11789 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11790 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11791 return false;
11792 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11793 return true;
11794 if (TREE_CODE (decl1) != NAMESPACE_DECL
11795 && TREE_CODE (decl1) != TYPE_DECL)
11796 return false;
11797 if (!DECL_NAME (decl1))
11798 return false;
11799 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11800 gcc_checking_assert (!DECL_NAME (decl2)
11801 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11802 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11803 return false;
11804 return same_for_odr (DECL_CONTEXT (decl1),
11805 DECL_CONTEXT (decl2));
11806 }
11807
11808 /* For languages with a One Definition Rule, work out if
11809 types are the same even if the tree representation differs.
11810 This is non-trivial for LTO where minor differences in
11811 the type representation may have prevented type merging
11812 from merging two copies of an otherwise equivalent type. */
11813
11814 bool
11815 types_same_for_odr (tree type1, tree type2)
11816 {
11817 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11818 type1 = TYPE_MAIN_VARIANT (type1);
11819 type2 = TYPE_MAIN_VARIANT (type2);
11820 if (type1 == type2)
11821 return true;
11822
11823 #ifndef ENABLE_CHECKING
11824 if (!in_lto_p)
11825 return false;
11826 #endif
11827
11828 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11829 on the corresponding TYPE_STUB_DECL. */
11830 if (type_in_anonymous_namespace_p (type1)
11831 || type_in_anonymous_namespace_p (type2))
11832 return false;
11833 /* When the assembler name of the virtual table is available, it is
11834 easy to compare types for equivalence. */
11835 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11836 && BINFO_VTABLE (TYPE_BINFO (type1))
11837 && BINFO_VTABLE (TYPE_BINFO (type2)))
11838 {
11839 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11840 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11841
11842 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11843 {
11844 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11845 || !operand_equal_p (TREE_OPERAND (v1, 1),
11846 TREE_OPERAND (v2, 1), 0))
11847 return false;
11848 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11849 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11850 }
11851 v1 = DECL_ASSEMBLER_NAME (v1);
11852 v2 = DECL_ASSEMBLER_NAME (v2);
11853 return (v1 == v2);
11854 }
11855
11856 /* FIXME: the code comparing type names considers all instantiations of the
11857 same template to have the same name. This is because we have no access
11858 to template parameters. For types with no virtual method tables
11859 we can thus return false positives. At the moment we do not need
11860 to compare types in scenarios other than devirtualization. */
11861
11862 /* If the types are not structurally the same, do not bother to continue.
11863 A match in the remainder of the code would mean an ODR violation. */
11864 if (!types_compatible_p (type1, type2))
11865 return false;
11866 if (!TYPE_NAME (type1))
11867 return false;
11868 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
11869 return false;
11870 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
11871 return false;
11872 /* When not in LTO the MAIN_VARIANT check should be the same. */
11873 gcc_assert (in_lto_p);
11874
11875 return true;
11876 }
11877
11878 /* TARGET is a call target of a GIMPLE call statement
11879 (obtained by gimple_call_fn). Return true if it is an
11880 OBJ_TYPE_REF representing a virtual call to a C++ method.
11881 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11882 through a cast, where the middle-end devirtualization machinery
11883 can't apply.) */
11884
11885 bool
11886 virtual_method_call_p (tree target)
11887 {
11888 if (TREE_CODE (target) != OBJ_TYPE_REF)
11889 return false;
11890 target = TREE_TYPE (target);
11891 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11892 target = TREE_TYPE (target);
11893 if (TREE_CODE (target) == FUNCTION_TYPE)
11894 return false;
11895 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11896 return true;
11897 }
11898
11899 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11900
11901 tree
11902 obj_type_ref_class (tree ref)
11903 {
11904 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11905 ref = TREE_TYPE (ref);
11906 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11907 ref = TREE_TYPE (ref);
11908 /* We look for the type THIS points to. ObjC also builds
11909 OBJ_TYPE_REF with non-method calls; their first parameter
11910 ID, however, also corresponds to the class type. */
11911 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11912 || TREE_CODE (ref) == FUNCTION_TYPE);
11913 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11914 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11915 return TREE_TYPE (ref);
11916 }
11917
11918 /* Return true if T is in anonymous namespace. */
11919
11920 bool
11921 type_in_anonymous_namespace_p (tree t)
11922 {
11923 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11924 }
11925
11926 /* Try to find a base info of BINFO that would have its field decl at offset
11927 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11928 found, return it, otherwise return NULL_TREE. */
11929
11930 tree
11931 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11932 {
11933 tree type = BINFO_TYPE (binfo);
11934
11935 while (true)
11936 {
11937 HOST_WIDE_INT pos, size;
11938 tree fld;
11939 int i;
11940
11941 if (types_same_for_odr (type, expected_type))
11942 return binfo;
11943 if (offset < 0)
11944 return NULL_TREE;
11945
11946 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11947 {
11948 if (TREE_CODE (fld) != FIELD_DECL)
11949 continue;
11950
11951 pos = int_bit_position (fld);
11952 size = tree_low_cst (DECL_SIZE (fld), 1);
11953 if (pos <= offset && (pos + size) > offset)
11954 break;
11955 }
11956 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11957 return NULL_TREE;
11958
11959 if (!DECL_ARTIFICIAL (fld))
11960 {
11961 binfo = TYPE_BINFO (TREE_TYPE (fld));
11962 if (!binfo)
11963 return NULL_TREE;
11964 }
11965 /* Offset 0 indicates the primary base, whose vtable contents are
11966 represented in the binfo for the derived class. */
11967 else if (offset != 0)
11968 {
11969 tree base_binfo, found_binfo = NULL_TREE;
11970 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11971 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11972 {
11973 found_binfo = base_binfo;
11974 break;
11975 }
11976 if (!found_binfo)
11977 return NULL_TREE;
11978 binfo = found_binfo;
11979 }
11980
11981 type = TREE_TYPE (fld);
11982 offset -= pos;
11983 }
11984 }
11985
11986 /* Returns true if X is a typedef decl. */
11987
11988 bool
11989 is_typedef_decl (tree x)
11990 {
11991 return (x && TREE_CODE (x) == TYPE_DECL
11992 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11993 }
11994
11995 /* Returns true iff TYPE is a type variant created for a typedef. */
11996
11997 bool
11998 typedef_variant_p (tree type)
11999 {
12000 return is_typedef_decl (TYPE_NAME (type));
12001 }
12002
12003 /* Warn about a use of an identifier which was marked deprecated. */
12004 void
12005 warn_deprecated_use (tree node, tree attr)
12006 {
12007 const char *msg;
12008
12009 if (node == 0 || !warn_deprecated_decl)
12010 return;
12011
12012 if (!attr)
12013 {
12014 if (DECL_P (node))
12015 attr = DECL_ATTRIBUTES (node);
12016 else if (TYPE_P (node))
12017 {
12018 tree decl = TYPE_STUB_DECL (node);
12019 if (decl)
12020 attr = lookup_attribute ("deprecated",
12021 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12022 }
12023 }
12024
12025 if (attr)
12026 attr = lookup_attribute ("deprecated", attr);
12027
12028 if (attr)
12029 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12030 else
12031 msg = NULL;
12032
12033 if (DECL_P (node))
12034 {
12035 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12036 if (msg)
12037 warning (OPT_Wdeprecated_declarations,
12038 "%qD is deprecated (declared at %r%s:%d%R): %s",
12039 node, "locus", xloc.file, xloc.line, msg);
12040 else
12041 warning (OPT_Wdeprecated_declarations,
12042 "%qD is deprecated (declared at %r%s:%d%R)",
12043 node, "locus", xloc.file, xloc.line);
12044 }
12045 else if (TYPE_P (node))
12046 {
12047 tree what = NULL_TREE;
12048 tree decl = TYPE_STUB_DECL (node);
12049
12050 if (TYPE_NAME (node))
12051 {
12052 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12053 what = TYPE_NAME (node);
12054 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12055 && DECL_NAME (TYPE_NAME (node)))
12056 what = DECL_NAME (TYPE_NAME (node));
12057 }
12058
12059 if (decl)
12060 {
12061 expanded_location xloc
12062 = expand_location (DECL_SOURCE_LOCATION (decl));
12063 if (what)
12064 {
12065 if (msg)
12066 warning (OPT_Wdeprecated_declarations,
12067 "%qE is deprecated (declared at %r%s:%d%R): %s",
12068 what, "locus", xloc.file, xloc.line, msg);
12069 else
12070 warning (OPT_Wdeprecated_declarations,
12071 "%qE is deprecated (declared at %r%s:%d%R)",
12072 what, "locus", xloc.file, xloc.line);
12073 }
12074 else
12075 {
12076 if (msg)
12077 warning (OPT_Wdeprecated_declarations,
12078 "type is deprecated (declared at %r%s:%d%R): %s",
12079 "locus", xloc.file, xloc.line, msg);
12080 else
12081 warning (OPT_Wdeprecated_declarations,
12082 "type is deprecated (declared at %r%s:%d%R)",
12083 "locus", xloc.file, xloc.line);
12084 }
12085 }
12086 else
12087 {
12088 if (what)
12089 {
12090 if (msg)
12091 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12092 what, msg);
12093 else
12094 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12095 }
12096 else
12097 {
12098 if (msg)
12099 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12100 msg);
12101 else
12102 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12103 }
12104 }
12105 }
12106 }
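/* For example, a use of

     int old_api (void) __attribute__ ((deprecated ("use new_api instead")));

   reaches this function with ATTR unset; the "deprecated" attribute and its
   optional message are then looked up on the decl, and the warning points at
   the declaration's location.  */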
12107
12108 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12109 somewhere in it. */
12110
12111 bool
12112 contains_bitfld_component_ref_p (const_tree ref)
12113 {
12114 while (handled_component_p (ref))
12115 {
12116 if (TREE_CODE (ref) == COMPONENT_REF
12117 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12118 return true;
12119 ref = TREE_OPERAND (ref, 0);
12120 }
12121
12122 return false;
12123 }
12124
12125 /* Try to determine whether a TRY_CATCH expression can fall through.
12126 This is a subroutine of block_may_fallthru. */
12127
12128 static bool
12129 try_catch_may_fallthru (const_tree stmt)
12130 {
12131 tree_stmt_iterator i;
12132
12133 /* If the TRY block can fall through, the whole TRY_CATCH can
12134 fall through. */
12135 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12136 return true;
12137
12138 i = tsi_start (TREE_OPERAND (stmt, 1));
12139 switch (TREE_CODE (tsi_stmt (i)))
12140 {
12141 case CATCH_EXPR:
12142 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12143 catch expression and a body. The whole TRY_CATCH may fall
12144 through iff any of the catch bodies falls through. */
12145 for (; !tsi_end_p (i); tsi_next (&i))
12146 {
12147 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12148 return true;
12149 }
12150 return false;
12151
12152 case EH_FILTER_EXPR:
12153 /* The exception filter expression only matters if there is an
12154 exception. If the exception does not match EH_FILTER_TYPES,
12155 we will execute EH_FILTER_FAILURE, and we will fall through
12156 if that falls through. If the exception does match
12157 EH_FILTER_TYPES, the stack unwinder will continue up the
12158 stack, so we will not fall through. We don't know whether we
12159 will throw an exception which matches EH_FILTER_TYPES or not,
12160 so we just ignore EH_FILTER_TYPES and assume that we might
12161 throw an exception which doesn't match. */
12162 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12163
12164 default:
12165 /* This case represents statements to be executed when an
12166 exception occurs. Those statements are implicitly followed
12167 by a RESX statement to resume execution after the exception.
12168 So in this case the TRY_CATCH never falls through. */
12169 return false;
12170 }
12171 }
12172
12173 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12174 need not be 100% accurate; simply be conservative and return true if we
12175 don't know. This is used only to avoid stupidly generating extra code.
12176 If we're wrong, we'll just delete the extra code later. */
12177
12178 bool
12179 block_may_fallthru (const_tree block)
12180 {
12181 /* This CONST_CAST is okay because expr_last returns its argument
12182 unmodified and we assign it to a const_tree. */
12183 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12184
12185 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12186 {
12187 case GOTO_EXPR:
12188 case RETURN_EXPR:
12189 /* Easy cases. If the last statement of the block implies
12190 control transfer, then we can't fall through. */
12191 return false;
12192
12193 case SWITCH_EXPR:
12194 /* If SWITCH_LABELS is set, this is lowered, and represents a
12195 branch to a selected label and hence cannot fall through.
12196 Otherwise SWITCH_BODY is set, and the switch can fall
12197 through. */
12198 return SWITCH_LABELS (stmt) == NULL_TREE;
12199
12200 case COND_EXPR:
12201 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12202 return true;
12203 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12204
12205 case BIND_EXPR:
12206 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12207
12208 case TRY_CATCH_EXPR:
12209 return try_catch_may_fallthru (stmt);
12210
12211 case TRY_FINALLY_EXPR:
12212 /* The finally clause is always executed after the try clause,
12213 so if it does not fall through, then the try-finally will not
12214 fall through. Otherwise, if the try clause does not fall
12215 through, then when the finally clause falls through it will
12216 resume execution wherever the try clause was going. So the
12217 whole try-finally will only fall through if both the try
12218 clause and the finally clause fall through. */
12219 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12220 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12221
12222 case MODIFY_EXPR:
12223 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12224 stmt = TREE_OPERAND (stmt, 1);
12225 else
12226 return true;
12227 /* FALLTHRU */
12228
12229 case CALL_EXPR:
12230 /* Functions that do not return do not fall through. */
12231 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12232
12233 case CLEANUP_POINT_EXPR:
12234 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12235
12236 case TARGET_EXPR:
12237 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12238
12239 case ERROR_MARK:
12240 return true;
12241
12242 default:
12243 return lang_hooks.block_may_fallthru (stmt);
12244 }
12245 }
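/* For example:

     { x = 1; return x; }     cannot fall through (ends in a RETURN_EXPR);
     { foo (); abort (); }    cannot fall through (abort is ECF_NORETURN);
     { if (c) x = 1; }        may fall through (the then arm, and the empty
                              else arm, both do).  */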
12246
12247 /* True if we are using EH to handle cleanups. */
12248 static bool using_eh_for_cleanups_flag = false;
12249
12250 /* This routine is called from front ends to indicate eh should be used for
12251 cleanups. */
12252 void
12253 using_eh_for_cleanups (void)
12254 {
12255 using_eh_for_cleanups_flag = true;
12256 }
12257
12258 /* Query whether EH is used for cleanups. */
12259 bool
12260 using_eh_for_cleanups_p (void)
12261 {
12262 return using_eh_for_cleanups_flag;
12263 }
12264
12265 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12266 const char *
12267 get_tree_code_name (enum tree_code code)
12268 {
12269 const char *invalid = "<invalid tree code>";
12270
12271 if (code >= MAX_TREE_CODES)
12272 return invalid;
12273
12274 return tree_code_name[code];
12275 }
12276
12277 /* Drops the TREE_OVERFLOW flag from T. */
12278
12279 tree
12280 drop_tree_overflow (tree t)
12281 {
12282 gcc_checking_assert (TREE_OVERFLOW (t));
12283
12284 /* For tree codes with a sharing machinery re-build the result. */
12285 if (TREE_CODE (t) == INTEGER_CST)
12286 return build_int_cst_wide (TREE_TYPE (t),
12287 TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t));
12288
12289 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12290 and drop the flag. */
12291 t = copy_node (t);
12292 TREE_OVERFLOW (t) = 0;
12293 return t;
12294 }
12295
12296 /* Given a memory reference expression T, return its base address.
12297 The base address of a memory reference expression is the main
12298 object being referenced. For instance, the base address for
12299 'array[i].fld[j]' is 'array'. You can think of this as stripping
12300 away the offset part from a memory address.
12301
12302 This function calls handled_component_p to strip away all the inner
12303 parts of the memory reference until it reaches the base object. */
12304
12305 tree
12306 get_base_address (tree t)
12307 {
12308 while (handled_component_p (t))
12309 t = TREE_OPERAND (t, 0);
12310
12311 if ((TREE_CODE (t) == MEM_REF
12312 || TREE_CODE (t) == TARGET_MEM_REF)
12313 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12314 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12315
12316 /* ??? Either the alias oracle or all callers need to properly deal
12317 with WITH_SIZE_EXPRs before we can look through those. */
12318 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12319 return NULL_TREE;
12320
12321 return t;
12322 }
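/* For example, 'array[i].fld[j]' yields 'array'; a MEM_REF or TARGET_MEM_REF
   whose address operand is '&a' yields 'a'; and a WITH_SIZE_EXPR yields
   NULL_TREE until callers learn to handle it (see the comment above).  */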
12323
12324 #include "gt-tree.h"