1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "tm_p.h"
37 #include "function.h"
38 #include "obstack.h"
39 #include "toplev.h" /* get_random_seed */
40 #include "ggc.h"
41 #include "hashtab.h"
42 #include "filenames.h"
43 #include "output.h"
44 #include "target.h"
45 #include "common/common-target.h"
46 #include "langhooks.h"
47 #include "tree-inline.h"
48 #include "tree-iterator.h"
49 #include "basic-block.h"
50 #include "tree-ssa.h"
51 #include "params.h"
52 #include "pointer-set.h"
53 #include "tree-pass.h"
54 #include "langhooks-def.h"
55 #include "diagnostic.h"
56 #include "tree-diagnostic.h"
57 #include "tree-pretty-print.h"
58 #include "cgraph.h"
59 #include "except.h"
60 #include "debug.h"
61 #include "intl.h"
62 #include "wide-int.h"
63
64 /* Tree code classes. */
65
66 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
67 #define END_OF_BASE_TREE_CODES tcc_exceptional,
68
69 const enum tree_code_class tree_code_type[] = {
70 #include "all-tree.def"
71 };
72
73 #undef DEFTREECODE
74 #undef END_OF_BASE_TREE_CODES
75
76 /* Table indexed by tree code giving number of expression
77 operands beyond the fixed part of the node structure.
78 Not used for types or decls. */
79
80 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
81 #define END_OF_BASE_TREE_CODES 0,
82
83 const unsigned char tree_code_length[] = {
84 #include "all-tree.def"
85 };
86
87 #undef DEFTREECODE
88 #undef END_OF_BASE_TREE_CODES
89
90 /* Names of tree components.
91 Used for printing out the tree and error messages. */
92 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
93 #define END_OF_BASE_TREE_CODES "@dummy",
94
95 static const char *const tree_code_name[] = {
96 #include "all-tree.def"
97 };
98
99 #undef DEFTREECODE
100 #undef END_OF_BASE_TREE_CODES
101
102 /* Each tree code class has an associated string representation.
103 These must correspond to the tree_code_class entries. */
104
105 const char *const tree_code_class_strings[] =
106 {
107 "exceptional",
108 "constant",
109 "type",
110 "declaration",
111 "reference",
112 "comparison",
113 "unary",
114 "binary",
115 "statement",
116 "vl_exp",
117 "expression"
118 };
119
120 /* obstack.[ch] explicitly declined to prototype this. */
121 extern int _obstack_allocated_p (struct obstack *h, void *obj);
122
123 /* Statistics-gathering stuff. */
124
125 static int tree_code_counts[MAX_TREE_CODES];
126 int tree_node_counts[(int) all_kinds];
127 int tree_node_sizes[(int) all_kinds];
128
129 /* Keep in sync with tree.h:enum tree_node_kind. */
130 static const char * const tree_node_kind_names[] = {
131 "decls",
132 "types",
133 "blocks",
134 "stmts",
135 "refs",
136 "exprs",
137 "constants",
138 "identifiers",
139 "vecs",
140 "binfos",
141 "ssa names",
142 "constructors",
143 "random kinds",
144 "lang_decl kinds",
145 "lang_type kinds",
146 "omp clauses",
147 };
148
149 /* Unique id for next decl created. */
150 static GTY(()) int next_decl_uid;
151 /* Unique id for next type created. */
152 static GTY(()) int next_type_uid = 1;
153 /* Unique id for next debug decl created. Use negative numbers,
154 to catch erroneous uses. */
155 static GTY(()) int next_debug_decl_uid;
156
157 /* Since we cannot rehash a type after it is in the table, we have to
158 keep the hash code. */
159
160 struct GTY(()) type_hash {
161 unsigned long hash;
162 tree type;
163 };
164
165 /* Initial size of the hash table (rounded to next prime). */
166 #define TYPE_HASH_INITIAL_SIZE 1000
167
168 /* Now here is the hash table. When recording a type, it is added to
169 the slot whose index is the hash code. Note that the hash table is
170 used for several kinds of types (function types, array types and
171 array index range types, for now). While all these live in the
172 same table, they are completely independent, and the hash code is
173 computed differently for each of these. */
174
175 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
176 htab_t type_hash_table;
177
178 /* Hash table and temporary node for larger integer const values. */
179 static GTY (()) tree int_cst_node;
180 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
181 htab_t int_cst_hash_table;
182
183 /* Hash table for optimization flags and target option flags. Use the same
184 hash table for both sets of options. Nodes for building the current
185 optimization and target option nodes. The assumption is most of the time
186 the options created will already be in the hash table, so we avoid
187 allocating and freeing up a node repeatedly. */
188 static GTY (()) tree cl_optimization_node;
189 static GTY (()) tree cl_target_option_node;
190 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
191 htab_t cl_option_hash_table;
192
193 /* General tree->tree mapping structure for use in hash tables. */
194
195
196 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
197 htab_t debug_expr_for_decl;
198
199 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
200 htab_t value_expr_for_decl;
201
202 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
203 htab_t debug_args_for_decl;
204
205 static GTY ((if_marked ("tree_priority_map_marked_p"),
206 param_is (struct tree_priority_map)))
207 htab_t init_priority_for_decl;
208
209 static void set_type_quals (tree, int);
210 static int type_hash_eq (const void *, const void *);
211 static hashval_t type_hash_hash (const void *);
212 static hashval_t int_cst_hash_hash (const void *);
213 static int int_cst_hash_eq (const void *, const void *);
214 static hashval_t cl_option_hash_hash (const void *);
215 static int cl_option_hash_eq (const void *, const void *);
216 static void print_type_hash_statistics (void);
217 static void print_debug_expr_statistics (void);
218 static void print_value_expr_statistics (void);
219 static int type_hash_marked_p (const void *);
220 static unsigned int type_hash_list (const_tree, hashval_t);
221 static unsigned int attribute_hash_list (const_tree, hashval_t);
222 static bool decls_same_for_odr (tree decl1, tree decl2);
223
224 tree global_trees[TI_MAX];
225 tree integer_types[itk_none];
226
227 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
228
229 /* Number of operands for each OpenMP clause. */
230 unsigned const char omp_clause_num_ops[] =
231 {
232 0, /* OMP_CLAUSE_ERROR */
233 1, /* OMP_CLAUSE_PRIVATE */
234 1, /* OMP_CLAUSE_SHARED */
235 1, /* OMP_CLAUSE_FIRSTPRIVATE */
236 2, /* OMP_CLAUSE_LASTPRIVATE */
237 4, /* OMP_CLAUSE_REDUCTION */
238 1, /* OMP_CLAUSE_COPYIN */
239 1, /* OMP_CLAUSE_COPYPRIVATE */
240 2, /* OMP_CLAUSE_LINEAR */
241 2, /* OMP_CLAUSE_ALIGNED */
242 1, /* OMP_CLAUSE_DEPEND */
243 1, /* OMP_CLAUSE_UNIFORM */
244 2, /* OMP_CLAUSE_FROM */
245 2, /* OMP_CLAUSE_TO */
246 2, /* OMP_CLAUSE_MAP */
247 1, /* OMP_CLAUSE__LOOPTEMP_ */
248 1, /* OMP_CLAUSE_IF */
249 1, /* OMP_CLAUSE_NUM_THREADS */
250 1, /* OMP_CLAUSE_SCHEDULE */
251 0, /* OMP_CLAUSE_NOWAIT */
252 0, /* OMP_CLAUSE_ORDERED */
253 0, /* OMP_CLAUSE_DEFAULT */
254 3, /* OMP_CLAUSE_COLLAPSE */
255 0, /* OMP_CLAUSE_UNTIED */
256 1, /* OMP_CLAUSE_FINAL */
257 0, /* OMP_CLAUSE_MERGEABLE */
258 1, /* OMP_CLAUSE_DEVICE */
259 1, /* OMP_CLAUSE_DIST_SCHEDULE */
260 0, /* OMP_CLAUSE_INBRANCH */
261 0, /* OMP_CLAUSE_NOTINBRANCH */
262 1, /* OMP_CLAUSE_NUM_TEAMS */
263 1, /* OMP_CLAUSE_THREAD_LIMIT */
264 0, /* OMP_CLAUSE_PROC_BIND */
265 1, /* OMP_CLAUSE_SAFELEN */
266 1, /* OMP_CLAUSE_SIMDLEN */
267 0, /* OMP_CLAUSE_FOR */
268 0, /* OMP_CLAUSE_PARALLEL */
269 0, /* OMP_CLAUSE_SECTIONS */
270 0, /* OMP_CLAUSE_TASKGROUP */
271 1, /* OMP_CLAUSE__SIMDUID_ */
272 };
273
274 const char * const omp_clause_code_name[] =
275 {
276 "error_clause",
277 "private",
278 "shared",
279 "firstprivate",
280 "lastprivate",
281 "reduction",
282 "copyin",
283 "copyprivate",
284 "linear",
285 "aligned",
286 "depend",
287 "uniform",
288 "from",
289 "to",
290 "map",
291 "_looptemp_",
292 "if",
293 "num_threads",
294 "schedule",
295 "nowait",
296 "ordered",
297 "default",
298 "collapse",
299 "untied",
300 "final",
301 "mergeable",
302 "device",
303 "dist_schedule",
304 "inbranch",
305 "notinbranch",
306 "num_teams",
307 "thread_limit",
308 "proc_bind",
309 "safelen",
310 "simdlen",
311 "for",
312 "parallel",
313 "sections",
314 "taskgroup",
315 "_simduid_"
316 };
317
318
319 /* Return the tree node structure used by tree code CODE. */
320
321 static inline enum tree_node_structure_enum
322 tree_node_structure_for_code (enum tree_code code)
323 {
324 switch (TREE_CODE_CLASS (code))
325 {
326 case tcc_declaration:
327 {
328 switch (code)
329 {
330 case FIELD_DECL:
331 return TS_FIELD_DECL;
332 case PARM_DECL:
333 return TS_PARM_DECL;
334 case VAR_DECL:
335 return TS_VAR_DECL;
336 case LABEL_DECL:
337 return TS_LABEL_DECL;
338 case RESULT_DECL:
339 return TS_RESULT_DECL;
340 case DEBUG_EXPR_DECL:
341 return TS_DECL_WRTL;
342 case CONST_DECL:
343 return TS_CONST_DECL;
344 case TYPE_DECL:
345 return TS_TYPE_DECL;
346 case FUNCTION_DECL:
347 return TS_FUNCTION_DECL;
348 case TRANSLATION_UNIT_DECL:
349 return TS_TRANSLATION_UNIT_DECL;
350 default:
351 return TS_DECL_NON_COMMON;
352 }
353 }
354 case tcc_type:
355 return TS_TYPE_NON_COMMON;
356 case tcc_reference:
357 case tcc_comparison:
358 case tcc_unary:
359 case tcc_binary:
360 case tcc_expression:
361 case tcc_statement:
362 case tcc_vl_exp:
363 return TS_EXP;
364 default: /* tcc_constant and tcc_exceptional */
365 break;
366 }
367 switch (code)
368 {
369 /* tcc_constant cases. */
370 case INTEGER_CST: return TS_INT_CST;
371 case REAL_CST: return TS_REAL_CST;
372 case FIXED_CST: return TS_FIXED_CST;
373 case COMPLEX_CST: return TS_COMPLEX;
374 case VECTOR_CST: return TS_VECTOR;
375 case STRING_CST: return TS_STRING;
376 /* tcc_exceptional cases. */
377 case ERROR_MARK: return TS_COMMON;
378 case IDENTIFIER_NODE: return TS_IDENTIFIER;
379 case TREE_LIST: return TS_LIST;
380 case TREE_VEC: return TS_VEC;
381 case SSA_NAME: return TS_SSA_NAME;
382 case PLACEHOLDER_EXPR: return TS_COMMON;
383 case STATEMENT_LIST: return TS_STATEMENT_LIST;
384 case BLOCK: return TS_BLOCK;
385 case CONSTRUCTOR: return TS_CONSTRUCTOR;
386 case TREE_BINFO: return TS_BINFO;
387 case OMP_CLAUSE: return TS_OMP_CLAUSE;
388 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
389 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
390
391 default:
392 gcc_unreachable ();
393 }
394 }
395
396
397 /* Initialize tree_contains_struct to describe the hierarchy of tree
398 nodes. */
399
400 static void
401 initialize_tree_contains_struct (void)
402 {
403 unsigned i;
404
405 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
406 {
407 enum tree_code code;
408 enum tree_node_structure_enum ts_code;
409
410 code = (enum tree_code) i;
411 ts_code = tree_node_structure_for_code (code);
412
413 /* Mark the TS structure itself. */
414 tree_contains_struct[code][ts_code] = 1;
415
416 /* Mark all the structures that TS is derived from. */
417 switch (ts_code)
418 {
419 case TS_TYPED:
420 case TS_BLOCK:
421 MARK_TS_BASE (code);
422 break;
423
424 case TS_COMMON:
425 case TS_INT_CST:
426 case TS_REAL_CST:
427 case TS_FIXED_CST:
428 case TS_VECTOR:
429 case TS_STRING:
430 case TS_COMPLEX:
431 case TS_SSA_NAME:
432 case TS_CONSTRUCTOR:
433 case TS_EXP:
434 case TS_STATEMENT_LIST:
435 MARK_TS_TYPED (code);
436 break;
437
438 case TS_IDENTIFIER:
439 case TS_DECL_MINIMAL:
440 case TS_TYPE_COMMON:
441 case TS_LIST:
442 case TS_VEC:
443 case TS_BINFO:
444 case TS_OMP_CLAUSE:
445 case TS_OPTIMIZATION:
446 case TS_TARGET_OPTION:
447 MARK_TS_COMMON (code);
448 break;
449
450 case TS_TYPE_WITH_LANG_SPECIFIC:
451 MARK_TS_TYPE_COMMON (code);
452 break;
453
454 case TS_TYPE_NON_COMMON:
455 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
456 break;
457
458 case TS_DECL_COMMON:
459 MARK_TS_DECL_MINIMAL (code);
460 break;
461
462 case TS_DECL_WRTL:
463 case TS_CONST_DECL:
464 MARK_TS_DECL_COMMON (code);
465 break;
466
467 case TS_DECL_NON_COMMON:
468 MARK_TS_DECL_WITH_VIS (code);
469 break;
470
471 case TS_DECL_WITH_VIS:
472 case TS_PARM_DECL:
473 case TS_LABEL_DECL:
474 case TS_RESULT_DECL:
475 MARK_TS_DECL_WRTL (code);
476 break;
477
478 case TS_FIELD_DECL:
479 MARK_TS_DECL_COMMON (code);
480 break;
481
482 case TS_VAR_DECL:
483 MARK_TS_DECL_WITH_VIS (code);
484 break;
485
486 case TS_TYPE_DECL:
487 case TS_FUNCTION_DECL:
488 MARK_TS_DECL_NON_COMMON (code);
489 break;
490
491 case TS_TRANSLATION_UNIT_DECL:
492 MARK_TS_DECL_COMMON (code);
493 break;
494
495 default:
496 gcc_unreachable ();
497 }
498 }
499
500 /* Basic consistency checks for attributes used in fold. */
501 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
502 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
503 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
504 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
505 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
506 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
507 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
508 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
509 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
510 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
511 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
512 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
513 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
514 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
515 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
516 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
517 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
518 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
519 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
520 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
521 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
522 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
523 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
524 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
525 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
526 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
527 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
528 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
529 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
530 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
531 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
532 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
533 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
534 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
537 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
539 }
540
541
542 /* Init tree.c. */
543
544 void
545 init_ttree (void)
546 {
547 /* Initialize the hash table of types. */
548 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
549 type_hash_eq, 0);
550
551 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
552 tree_decl_map_eq, 0);
553
554 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
555 tree_decl_map_eq, 0);
556 init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
557 tree_priority_map_eq, 0);
558
559 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
560 int_cst_hash_eq, NULL);
561
562 int_cst_node = make_int_cst (1, 1);
563
564 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
565 cl_option_hash_eq, NULL);
566
567 cl_optimization_node = make_node (OPTIMIZATION_NODE);
568 cl_target_option_node = make_node (TARGET_OPTION_NODE);
569
570 /* Initialize the tree_contains_struct array. */
571 initialize_tree_contains_struct ();
572 lang_hooks.init_ts ();
573 }
574
575 \f
576 /* The name of the object as the assembler will see it (but before any
577 translations made by ASM_OUTPUT_LABELREF). Often this is the same
578 as DECL_NAME. It is an IDENTIFIER_NODE. */
579 tree
580 decl_assembler_name (tree decl)
581 {
582 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
583 lang_hooks.set_decl_assembler_name (decl);
584 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
585 }
586
587 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
588
589 bool
590 decl_assembler_name_equal (tree decl, const_tree asmname)
591 {
592 tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
593 const char *decl_str;
594 const char *asmname_str;
595 bool test = false;
596
597 if (decl_asmname == asmname)
598 return true;
599
600 decl_str = IDENTIFIER_POINTER (decl_asmname);
601 asmname_str = IDENTIFIER_POINTER (asmname);
602
603
604 /* If the target assembler name was set by the user, things are trickier.
605 We have a leading '*' to begin with. After that, it's arguable what
606 is the correct thing to do with -fleading-underscore. Arguably, we've
607 historically been doing the wrong thing in assemble_alias by always
608 printing the leading underscore. Since we're not changing that, make
609 sure user_label_prefix follows the '*' before matching. */
610 if (decl_str[0] == '*')
611 {
612 size_t ulp_len = strlen (user_label_prefix);
613
614 decl_str ++;
615
616 if (ulp_len == 0)
617 test = true;
618 else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
619 decl_str += ulp_len, test=true;
620 else
621 decl_str --;
622 }
623 if (asmname_str[0] == '*')
624 {
625 size_t ulp_len = strlen (user_label_prefix);
626
627 asmname_str ++;
628
629 if (ulp_len == 0)
630 test = true;
631 else if (strncmp (asmname_str, user_label_prefix, ulp_len) == 0)
632 asmname_str += ulp_len, test=true;
633 else
634 asmname_str --;
635 }
636
637 if (!test)
638 return false;
639 return strcmp (decl_str, asmname_str) == 0;
640 }
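
/* A minimal usage sketch (hypothetical; DECL stands for some decl whose
   assembler name was user-specified as "*foo", and user_label_prefix is
   assumed empty):

     tree asmname = get_identifier ("foo");
     if (decl_assembler_name_equal (decl, asmname))
       ...

   The leading '*' marks a user-specified name; the comparison strips it
   and, when non-empty, the user_label_prefix before doing the strcmp.  */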
641
642 /* Hash asmnames ignoring the user specified marks. */
643
644 hashval_t
645 decl_assembler_name_hash (const_tree asmname)
646 {
647 if (IDENTIFIER_POINTER (asmname)[0] == '*')
648 {
649 const char *decl_str = IDENTIFIER_POINTER (asmname) + 1;
650 size_t ulp_len = strlen (user_label_prefix);
651
652 if (ulp_len == 0)
653 ;
654 else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
655 decl_str += ulp_len;
656
657 return htab_hash_string (decl_str);
658 }
659
660 return htab_hash_string (IDENTIFIER_POINTER (asmname));
661 }
662
663 /* Compute the number of bytes occupied by a tree with code CODE.
664 This function cannot be used for nodes that have variable sizes,
665 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
666 size_t
667 tree_code_size (enum tree_code code)
668 {
669 switch (TREE_CODE_CLASS (code))
670 {
671 case tcc_declaration: /* A decl node */
672 {
673 switch (code)
674 {
675 case FIELD_DECL:
676 return sizeof (struct tree_field_decl);
677 case PARM_DECL:
678 return sizeof (struct tree_parm_decl);
679 case VAR_DECL:
680 return sizeof (struct tree_var_decl);
681 case LABEL_DECL:
682 return sizeof (struct tree_label_decl);
683 case RESULT_DECL:
684 return sizeof (struct tree_result_decl);
685 case CONST_DECL:
686 return sizeof (struct tree_const_decl);
687 case TYPE_DECL:
688 return sizeof (struct tree_type_decl);
689 case FUNCTION_DECL:
690 return sizeof (struct tree_function_decl);
691 case DEBUG_EXPR_DECL:
692 return sizeof (struct tree_decl_with_rtl);
693 default:
694 return sizeof (struct tree_decl_non_common);
695 }
696 }
697
698 case tcc_type: /* a type node */
699 return sizeof (struct tree_type_non_common);
700
701 case tcc_reference: /* a reference */
702 case tcc_expression: /* an expression */
703 case tcc_statement: /* an expression with side effects */
704 case tcc_comparison: /* a comparison expression */
705 case tcc_unary: /* a unary arithmetic expression */
706 case tcc_binary: /* a binary arithmetic expression */
707 return (sizeof (struct tree_exp)
708 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
709
710 case tcc_constant: /* a constant */
711 switch (code)
712 {
713 case INTEGER_CST: gcc_unreachable ();
714 case REAL_CST: return sizeof (struct tree_real_cst);
715 case FIXED_CST: return sizeof (struct tree_fixed_cst);
716 case COMPLEX_CST: return sizeof (struct tree_complex);
717 case VECTOR_CST: return sizeof (struct tree_vector);
718 case STRING_CST: gcc_unreachable ();
719 default:
720 return lang_hooks.tree_size (code);
721 }
722
723 case tcc_exceptional: /* something random, like an identifier. */
724 switch (code)
725 {
726 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
727 case TREE_LIST: return sizeof (struct tree_list);
728
729 case ERROR_MARK:
730 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
731
732 case TREE_VEC:
733 case OMP_CLAUSE: gcc_unreachable ();
734
735 case SSA_NAME: return sizeof (struct tree_ssa_name);
736
737 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
738 case BLOCK: return sizeof (struct tree_block);
739 case CONSTRUCTOR: return sizeof (struct tree_constructor);
740 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
741 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
742
743 default:
744 return lang_hooks.tree_size (code);
745 }
746
747 default:
748 gcc_unreachable ();
749 }
750 }
751
752 /* Compute the number of bytes occupied by NODE. This routine only
753 looks at TREE_CODE, except for those nodes that have variable sizes. */
754 size_t
755 tree_size (const_tree node)
756 {
757 const enum tree_code code = TREE_CODE (node);
758 switch (code)
759 {
760 case INTEGER_CST:
761 return (sizeof (struct tree_int_cst)
762 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
763
764 case TREE_BINFO:
765 return (offsetof (struct tree_binfo, base_binfos)
766 + vec<tree, va_gc>
767 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
768
769 case TREE_VEC:
770 return (sizeof (struct tree_vec)
771 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
772
773 case VECTOR_CST:
774 return (sizeof (struct tree_vector)
775 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
776
777 case STRING_CST:
778 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
779
780 case OMP_CLAUSE:
781 return (sizeof (struct tree_omp_clause)
782 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
783 * sizeof (tree));
784
785 default:
786 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
787 return (sizeof (struct tree_exp)
788 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
789 else
790 return tree_code_size (code);
791 }
792 }
793
794 /* Record interesting allocation statistics for a tree node with CODE
795 and LENGTH. */
796
797 static void
798 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
799 size_t length ATTRIBUTE_UNUSED)
800 {
801 enum tree_code_class type = TREE_CODE_CLASS (code);
802 tree_node_kind kind;
803
804 if (!GATHER_STATISTICS)
805 return;
806
807 switch (type)
808 {
809 case tcc_declaration: /* A decl node */
810 kind = d_kind;
811 break;
812
813 case tcc_type: /* a type node */
814 kind = t_kind;
815 break;
816
817 case tcc_statement: /* an expression with side effects */
818 kind = s_kind;
819 break;
820
821 case tcc_reference: /* a reference */
822 kind = r_kind;
823 break;
824
825 case tcc_expression: /* an expression */
826 case tcc_comparison: /* a comparison expression */
827 case tcc_unary: /* a unary arithmetic expression */
828 case tcc_binary: /* a binary arithmetic expression */
829 kind = e_kind;
830 break;
831
832 case tcc_constant: /* a constant */
833 kind = c_kind;
834 break;
835
836 case tcc_exceptional: /* something random, like an identifier. */
837 switch (code)
838 {
839 case IDENTIFIER_NODE:
840 kind = id_kind;
841 break;
842
843 case TREE_VEC:
844 kind = vec_kind;
845 break;
846
847 case TREE_BINFO:
848 kind = binfo_kind;
849 break;
850
851 case SSA_NAME:
852 kind = ssa_name_kind;
853 break;
854
855 case BLOCK:
856 kind = b_kind;
857 break;
858
859 case CONSTRUCTOR:
860 kind = constr_kind;
861 break;
862
863 case OMP_CLAUSE:
864 kind = omp_clause_kind;
865 break;
866
867 default:
868 kind = x_kind;
869 break;
870 }
871 break;
872
873 case tcc_vl_exp:
874 kind = e_kind;
875 break;
876
877 default:
878 gcc_unreachable ();
879 }
880
881 tree_code_counts[(int) code]++;
882 tree_node_counts[(int) kind]++;
883 tree_node_sizes[(int) kind] += length;
884 }
885
886 /* Allocate and return a new UID from the DECL_UID namespace. */
887
888 int
889 allocate_decl_uid (void)
890 {
891 return next_decl_uid++;
892 }
893
894 /* Return a newly allocated node of code CODE. For decl and type
895 nodes, some other fields are initialized. The rest of the node is
896 initialized to zero. This function cannot be used for TREE_VEC,
897 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
898 tree_code_size.
899
900 Achoo! I got a code in the node. */
901
902 tree
903 make_node_stat (enum tree_code code MEM_STAT_DECL)
904 {
905 tree t;
906 enum tree_code_class type = TREE_CODE_CLASS (code);
907 size_t length = tree_code_size (code);
908
909 record_node_allocation_statistics (code, length);
910
911 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
912 TREE_SET_CODE (t, code);
913
914 switch (type)
915 {
916 case tcc_statement:
917 TREE_SIDE_EFFECTS (t) = 1;
918 break;
919
920 case tcc_declaration:
921 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
922 {
923 if (code == FUNCTION_DECL)
924 {
925 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
926 DECL_MODE (t) = FUNCTION_MODE;
927 }
928 else
929 DECL_ALIGN (t) = 1;
930 }
931 DECL_SOURCE_LOCATION (t) = input_location;
932 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
933 DECL_UID (t) = --next_debug_decl_uid;
934 else
935 {
936 DECL_UID (t) = allocate_decl_uid ();
937 SET_DECL_PT_UID (t, -1);
938 }
939 if (TREE_CODE (t) == LABEL_DECL)
940 LABEL_DECL_UID (t) = -1;
941
942 break;
943
944 case tcc_type:
945 TYPE_UID (t) = next_type_uid++;
946 TYPE_ALIGN (t) = BITS_PER_UNIT;
947 TYPE_USER_ALIGN (t) = 0;
948 TYPE_MAIN_VARIANT (t) = t;
949 TYPE_CANONICAL (t) = t;
950
951 /* Default to no attributes for type, but let target change that. */
952 TYPE_ATTRIBUTES (t) = NULL_TREE;
953 targetm.set_default_type_attributes (t);
954
955 /* We have not yet computed the alias set for this type. */
956 TYPE_ALIAS_SET (t) = -1;
957 break;
958
959 case tcc_constant:
960 TREE_CONSTANT (t) = 1;
961 break;
962
963 case tcc_expression:
964 switch (code)
965 {
966 case INIT_EXPR:
967 case MODIFY_EXPR:
968 case VA_ARG_EXPR:
969 case PREDECREMENT_EXPR:
970 case PREINCREMENT_EXPR:
971 case POSTDECREMENT_EXPR:
972 case POSTINCREMENT_EXPR:
973 /* All of these have side-effects, no matter what their
974 operands are. */
975 TREE_SIDE_EFFECTS (t) = 1;
976 break;
977
978 default:
979 break;
980 }
981 break;
982
983 default:
984 /* Other classes need no special treatment. */
985 break;
986 }
987
988 return t;
989 }
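
/* A minimal usage sketch (assuming the usual GCC internal environment):
   make_node returns a zeroed node with the class-specific defaults set
   above, e.g. a fresh type is its own main variant and has no alias set
   computed yet:

     tree t = make_node (INTEGER_TYPE);
     gcc_assert (TYPE_MAIN_VARIANT (t) == t && TYPE_ALIAS_SET (t) == -1);  */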
990 \f
991 /* Return a new node with the same contents as NODE except that its
992 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
993
994 tree
995 copy_node_stat (tree node MEM_STAT_DECL)
996 {
997 tree t;
998 enum tree_code code = TREE_CODE (node);
999 size_t length;
1000
1001 gcc_assert (code != STATEMENT_LIST);
1002
1003 length = tree_size (node);
1004 record_node_allocation_statistics (code, length);
1005 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1006 memcpy (t, node, length);
1007
1008 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1009 TREE_CHAIN (t) = 0;
1010 TREE_ASM_WRITTEN (t) = 0;
1011 TREE_VISITED (t) = 0;
1012
1013 if (TREE_CODE_CLASS (code) == tcc_declaration)
1014 {
1015 if (code == DEBUG_EXPR_DECL)
1016 DECL_UID (t) = --next_debug_decl_uid;
1017 else
1018 {
1019 DECL_UID (t) = allocate_decl_uid ();
1020 if (DECL_PT_UID_SET_P (node))
1021 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1022 }
1023 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1024 && DECL_HAS_VALUE_EXPR_P (node))
1025 {
1026 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1027 DECL_HAS_VALUE_EXPR_P (t) = 1;
1028 }
1029 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1030 if (TREE_CODE (node) == VAR_DECL)
1031 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1032 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1033 {
1034 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1035 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1036 }
1037 if (TREE_CODE (node) == FUNCTION_DECL)
1038 DECL_STRUCT_FUNCTION (t) = NULL;
1039 }
1040 else if (TREE_CODE_CLASS (code) == tcc_type)
1041 {
1042 TYPE_UID (t) = next_type_uid++;
1043 /* The following is so that the debug code for
1044 the copy is different from the original type.
1045 The two statements usually duplicate each other
1046 (because they clear fields of the same union),
1047 but the optimizer should catch that. */
1048 TYPE_SYMTAB_POINTER (t) = 0;
1049 TYPE_SYMTAB_ADDRESS (t) = 0;
1050
1051 /* Do not copy the values cache. */
1052 if (TYPE_CACHED_VALUES_P (t))
1053 {
1054 TYPE_CACHED_VALUES_P (t) = 0;
1055 TYPE_CACHED_VALUES (t) = NULL_TREE;
1056 }
1057 }
1058
1059 return t;
1060 }
1061
1062 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1063 For example, this can copy a list made of TREE_LIST nodes. */
1064
1065 tree
1066 copy_list (tree list)
1067 {
1068 tree head;
1069 tree prev, next;
1070
1071 if (list == 0)
1072 return 0;
1073
1074 head = prev = copy_node (list);
1075 next = TREE_CHAIN (list);
1076 while (next)
1077 {
1078 TREE_CHAIN (prev) = copy_node (next);
1079 prev = TREE_CHAIN (prev);
1080 next = TREE_CHAIN (next);
1081 }
1082 return head;
1083 }
1084
1085 \f
1086 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1087 INTEGER_CST with value CST and type TYPE. */
1088
1089 static unsigned int
1090 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1091 {
1092 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1093 /* We need an extra zero HWI if CST is an unsigned integer with its
1094 upper bit set, and if CST occupies a whole number of HWIs. */
1095 if (TYPE_UNSIGNED (type)
1096 && wi::neg_p (cst)
1097 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1098 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1099 return cst.get_len ();
1100 }
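
/* Worked example (assuming HOST_BITS_PER_WIDE_INT == 64): for a 64-bit
   unsigned TYPE and CST = 0x8000000000000000, the top bit is set (so CST
   reads as negative in wide_int terms) and the precision is a whole number
   of HWIs, hence one extra zero HWI is needed and the result is 2.  For
   the same bits in a 64-bit signed type the result is cst.get_len () == 1.  */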
1101
1102 /* Return a new INTEGER_CST with value CST and type TYPE. */
1103
1104 static tree
1105 build_new_int_cst (tree type, const wide_int &cst)
1106 {
1107 unsigned int len = cst.get_len ();
1108 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1109 tree nt = make_int_cst (len, ext_len);
1110
1111 if (len < ext_len)
1112 {
1113 --ext_len;
1114 TREE_INT_CST_ELT (nt, ext_len) = 0;
1115 for (unsigned int i = len; i < ext_len; ++i)
1116 TREE_INT_CST_ELT (nt, i) = -1;
1117 }
1118 else if (TYPE_UNSIGNED (type)
1119 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1120 {
1121 len--;
1122 TREE_INT_CST_ELT (nt, len)
1123 = zext_hwi (cst.elt (len),
1124 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1125 }
1126
1127 for (unsigned int i = 0; i < len; i++)
1128 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1129 TREE_TYPE (nt) = type;
1130 return nt;
1131 }
1132
1133 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1134
1135 tree
1136 build_int_cst (tree type, HOST_WIDE_INT low)
1137 {
1138 /* Support legacy code. */
1139 if (!type)
1140 type = integer_type_node;
1141
1142 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1143 }
1144
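/* Create an INT_CST node with a CST value zero extended to TYPE. */
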
1145 tree
1146 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1147 {
1148 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1149 }
1150
1151 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1152
1153 tree
1154 build_int_cst_type (tree type, HOST_WIDE_INT low)
1155 {
1156 gcc_assert (type);
1157 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1158 }
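
/* A minimal usage sketch (assuming a 32-bit unsigned_type_node): the
   signed entry point sign-extends LOW into the type's precision, while
   build_int_cstu zero-extends, so both calls below yield the same shared
   0xffffffff node:

     tree a = build_int_cst (unsigned_type_node, -1);
     tree b = build_int_cstu (unsigned_type_node, 0xffffffff);
     gcc_assert (a == b);  */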
1159
1160 /* Constructs a tree of type TYPE with value given by CST. Signedness
1161 of CST is assumed to be the same as the signedness of TYPE. */
1162
1163 tree
1164 double_int_to_tree (tree type, double_int cst)
1165 {
1166 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1167 }
1168
1169 /* We force the wide_int CST to the range of the type TYPE by sign or
1170 zero extending it. OVERFLOWABLE indicates if we are interested in
1171 overflow of the value, when >0 we are only interested in signed
1172 overflow, for <0 we are interested in any overflow. OVERFLOWED
1173 indicates whether overflow has already occurred. CONST_OVERFLOWED
1174 indicates whether constant overflow has already occurred. We force
1175 T's value to be within range of T's type (by setting to 0 or 1 all
1176 the bits outside the type's range). We set TREE_OVERFLOWED if,
1177 OVERFLOWED is nonzero,
1178 or OVERFLOWABLE is >0 and signed overflow occurs
1179 or OVERFLOWABLE is <0 and any overflow occurs
1180 We return a new tree node for the extended wide_int. The node
1181 is shared if no overflow flags are set. */
1182
1183
1184 tree
1185 force_fit_type (tree type, const wide_int_ref &cst,
1186 int overflowable, bool overflowed)
1187 {
1188 signop sign = TYPE_SIGN (type);
1189
1190 /* If we need to set overflow flags, return a new unshared node. */
1191 if (overflowed || !wi::fits_to_tree_p (cst, type))
1192 {
1193 if (overflowed
1194 || overflowable < 0
1195 || (overflowable > 0 && sign == SIGNED))
1196 {
1197 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1198 tree t = build_new_int_cst (type, tmp);
1199 TREE_OVERFLOW (t) = 1;
1200 return t;
1201 }
1202 }
1203
1204 /* Else build a shared node. */
1205 return wide_int_to_tree (type, cst);
1206 }
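
/* A minimal usage sketch (values chosen only for illustration; ty8u stands
   for some 8-bit unsigned INTEGER_TYPE): fitting 256 with OVERFLOWABLE > 0
   and an unsigned sign silently wraps to the shared constant 0, whereas
   OVERFLOWABLE < 0 returns a fresh node with TREE_OVERFLOW set:

     wide_int w = wi::uhwi (256, 16);
     tree wrapped = force_fit_type (ty8u, w, 1, false);
     tree flagged = force_fit_type (ty8u, w, -1, false);  */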
1207
1208 /* These are the hash table functions for the hash table of INTEGER_CST
1209 nodes of a sizetype. */
1210
1211 /* Return the hash code of X, an INTEGER_CST. */
1212
1213 static hashval_t
1214 int_cst_hash_hash (const void *x)
1215 {
1216 const_tree const t = (const_tree) x;
1217 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1218 int i;
1219
1220 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1221 code ^= TREE_INT_CST_ELT (t, i);
1222
1223 return code;
1224 }
1225
1226 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1227 is the same as that given by *Y, also an INTEGER_CST tree node. */
1228
1229 static int
1230 int_cst_hash_eq (const void *x, const void *y)
1231 {
1232 const_tree const xt = (const_tree) x;
1233 const_tree const yt = (const_tree) y;
1234
1235 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1236 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1237 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1238 return false;
1239
1240 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1241 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1242 return false;
1243
1244 return true;
1245 }
1246
1247 /* Create an INT_CST node of TYPE and value CST.
1248 The returned node is always shared. For small integers we use a
1249 per-type vector cache, for larger ones we use a single hash table.
1250 The value is extended from its precision according to the sign of
1251 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1252 the upper bits and ensures that hashing and value equality based
1253 upon the underlying HOST_WIDE_INTs works without masking. */
1254
1255 tree
1256 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1257 {
1258 tree t;
1259 int ix = -1;
1260 int limit = 0;
1261
1262 gcc_assert (type);
1263 unsigned int prec = TYPE_PRECISION (type);
1264 signop sgn = TYPE_SIGN (type);
1265
1266 /* Verify that everything is canonical. */
1267 int l = pcst.get_len ();
1268 if (l > 1)
1269 {
1270 if (pcst.elt (l - 1) == 0)
1271 gcc_assert (pcst.elt (l - 2) < 0);
1272 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1273 gcc_assert (pcst.elt (l - 2) >= 0);
1274 }
1275
1276 wide_int cst = wide_int::from (pcst, prec, sgn);
1277 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1278
1279 switch (TREE_CODE (type))
1280 {
1281 case NULLPTR_TYPE:
1282 gcc_assert (cst == 0);
1283 /* Fallthru. */
1284
1285 case POINTER_TYPE:
1286 case REFERENCE_TYPE:
1287 /* Cache NULL pointer. */
1288 if (cst == 0)
1289 {
1290 limit = 1;
1291 ix = 0;
1292 }
1293 break;
1294
1295 case BOOLEAN_TYPE:
1296 /* Cache false or true. */
1297 limit = 2;
1298 if (wi::leu_p (cst, 1))
1299 ix = cst.to_uhwi ();
1300 break;
1301
1302 case INTEGER_TYPE:
1303 case OFFSET_TYPE:
1304 if (TYPE_SIGN (type) == UNSIGNED)
1305 {
1306 /* Cache 0..N */
1307 limit = INTEGER_SHARE_LIMIT;
1308
1309 /* This is a little hokey, but if the prec is smaller than
1310 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1311 obvious test will not get the correct answer. */
1312 if (prec < HOST_BITS_PER_WIDE_INT)
1313 {
1314 if (cst.to_uhwi () < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1315 ix = cst.to_uhwi ();
1316 }
1317 else if (wi::ltu_p (cst, INTEGER_SHARE_LIMIT))
1318 ix = cst.to_uhwi ();
1319 }
1320 else
1321 {
1322 /* Cache -1..N */
1323 limit = INTEGER_SHARE_LIMIT + 1;
1324
1325 if (cst == -1)
1326 ix = 0;
1327 else if (!wi::neg_p (cst))
1328 {
1329 if (prec < HOST_BITS_PER_WIDE_INT)
1330 {
1331 if (cst.to_shwi () < INTEGER_SHARE_LIMIT)
1332 ix = cst.to_shwi () + 1;
1333 }
1334 else if (wi::lts_p (cst, INTEGER_SHARE_LIMIT))
1335 ix = cst.to_shwi () + 1;
1336 }
1337 }
1338 break;
1339
1340 case ENUMERAL_TYPE:
1341 break;
1342
1343 default:
1344 gcc_unreachable ();
1345 }
1346
1347 if (ext_len == 1)
1348 {
1349 /* We just need to store a single HOST_WIDE_INT. */
1350 HOST_WIDE_INT hwi;
1351 if (TYPE_UNSIGNED (type))
1352 hwi = cst.to_uhwi ();
1353 else
1354 hwi = cst.to_shwi ();
1355 if (ix >= 0)
1356 {
1357 /* Look for it in the type's vector of small shared ints. */
1358 if (!TYPE_CACHED_VALUES_P (type))
1359 {
1360 TYPE_CACHED_VALUES_P (type) = 1;
1361 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1362 }
1363
1364 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1365 if (t)
1366 /* Make sure no one is clobbering the shared constant. */
1367 gcc_assert (TREE_TYPE (t) == type
1368 && TREE_INT_CST_NUNITS (t) == 1
1369 && TREE_INT_CST_EXT_NUNITS (t) == 1
1370 && TREE_INT_CST_ELT (t, 0) == hwi);
1371 else
1372 {
1373 /* Create a new shared int. */
1374 t = build_new_int_cst (type, cst);
1375 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1376 }
1377 }
1378 else
1379 {
1380 /* Use the cache of larger shared ints, using int_cst_node as
1381 a temporary. */
1382 void **slot;
1383
1384 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1385 TREE_TYPE (int_cst_node) = type;
1386
1387 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1388 t = (tree) *slot;
1389 if (!t)
1390 {
1391 /* Insert this one into the hash table. */
1392 t = int_cst_node;
1393 *slot = t;
1394 /* Make a new node for next time round. */
1395 int_cst_node = make_int_cst (1, 1);
1396 }
1397 }
1398 }
1399 else
1400 {
1401 /* The value either hashes properly or we drop it on the floor
1402 for the gc to take care of. There will not be enough of them
1403 to worry about. */
1404 void **slot;
1405
1406 tree nt = build_new_int_cst (type, cst);
1407 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1408 t = (tree) *slot;
1409 if (!t)
1410 {
1411 /* Insert this one into the hash table. */
1412 t = nt;
1413 *slot = t;
1414 }
1415 }
1416
1417 return t;
1418 }
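
/* A minimal usage sketch of the sharing guarantee (assuming the usual GCC
   globals): small values come from the per-type TYPE_CACHED_VALUES vector
   and larger ones from int_cst_hash_table, so repeated requests for the
   same value of the same type return the same node:

     wide_int w = wi::shwi (7, TYPE_PRECISION (integer_type_node));
     tree a = wide_int_to_tree (integer_type_node, w);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_assert (a == b);  */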
1419
1420 void
1421 cache_integer_cst (tree t)
1422 {
1423 tree type = TREE_TYPE (t);
1424 int ix = -1;
1425 int limit = 0;
1426 int prec = TYPE_PRECISION (type);
1427
1428 gcc_assert (!TREE_OVERFLOW (t));
1429
1430 switch (TREE_CODE (type))
1431 {
1432 case NULLPTR_TYPE:
1433 gcc_assert (integer_zerop (t));
1434 /* Fallthru. */
1435
1436 case POINTER_TYPE:
1437 case REFERENCE_TYPE:
1438 /* Cache NULL pointer. */
1439 if (integer_zerop (t))
1440 {
1441 limit = 1;
1442 ix = 0;
1443 }
1444 break;
1445
1446 case BOOLEAN_TYPE:
1447 /* Cache false or true. */
1448 limit = 2;
1449 if (wi::ltu_p (t, 2))
1450 ix = TREE_INT_CST_ELT (t, 0);
1451 break;
1452
1453 case INTEGER_TYPE:
1454 case OFFSET_TYPE:
1455 if (TYPE_UNSIGNED (type))
1456 {
1457 /* Cache 0..N */
1458 limit = INTEGER_SHARE_LIMIT;
1459
1460 /* This is a little hokey, but if the prec is smaller than
1461 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1462 obvious test will not get the correct answer. */
1463 if (prec < HOST_BITS_PER_WIDE_INT)
1464 {
1465 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1466 ix = tree_to_uhwi (t);
1467 }
1468 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1469 ix = tree_to_uhwi (t);
1470 }
1471 else
1472 {
1473 /* Cache -1..N */
1474 limit = INTEGER_SHARE_LIMIT + 1;
1475
1476 if (integer_minus_onep (t))
1477 ix = 0;
1478 else if (!wi::neg_p (t))
1479 {
1480 if (prec < HOST_BITS_PER_WIDE_INT)
1481 {
1482 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1483 ix = tree_to_shwi (t) + 1;
1484 }
1485 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1486 ix = tree_to_shwi (t) + 1;
1487 }
1488 }
1489 break;
1490
1491 case ENUMERAL_TYPE:
1492 break;
1493
1494 default:
1495 gcc_unreachable ();
1496 }
1497
1498 if (ix >= 0)
1499 {
1500 /* Look for it in the type's vector of small shared ints. */
1501 if (!TYPE_CACHED_VALUES_P (type))
1502 {
1503 TYPE_CACHED_VALUES_P (type) = 1;
1504 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1505 }
1506
1507 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1508 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1509 }
1510 else
1511 {
1512 /* Use the cache of larger shared ints. */
1513 void **slot;
1514
1515 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1516 /* If there is already an entry for the number verify it's the
1517 same. */
1518 if (*slot)
1519 gcc_assert (wi::eq_p (tree (*slot), t));
1520 else
1521 /* Otherwise insert this one into the hash table. */
1522 *slot = t;
1523 }
1524 }
1525
1526
1527 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1528 and the rest are zeros. */
1529
1530 tree
1531 build_low_bits_mask (tree type, unsigned bits)
1532 {
1533 gcc_assert (bits <= TYPE_PRECISION (type));
1534
1535 return wide_int_to_tree (type, wi::mask (bits, false,
1536 TYPE_PRECISION (type)));
1537 }
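
/* A minimal usage sketch (assuming a 32-bit unsigned_type_node):

     tree mask = build_low_bits_mask (unsigned_type_node, 8);
     gcc_assert (tree_to_uhwi (mask) == 0xff);  */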
1538
1539 /* Build a newly constructed VECTOR_CST node of length LEN. */
1540
1541 tree
1542 make_vector_stat (unsigned len MEM_STAT_DECL)
1543 {
1544 tree t;
1545 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1546
1547 record_node_allocation_statistics (VECTOR_CST, length);
1548
1549 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1550
1551 TREE_SET_CODE (t, VECTOR_CST);
1552 TREE_CONSTANT (t) = 1;
1553
1554 return t;
1555 }
1556
1557 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1558 are in a list pointed to by VALS. */
1559
1560 tree
1561 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1562 {
1563 int over = 0;
1564 unsigned cnt = 0;
1565 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1566 TREE_TYPE (v) = type;
1567
1568 /* Iterate through elements and check for overflow. */
1569 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1570 {
1571 tree value = vals[cnt];
1572
1573 VECTOR_CST_ELT (v, cnt) = value;
1574
1575 /* Don't crash if we get an address constant. */
1576 if (!CONSTANT_CLASS_P (value))
1577 continue;
1578
1579 over |= TREE_OVERFLOW (value);
1580 }
1581
1582 TREE_OVERFLOW (v) = over;
1583 return v;
1584 }
1585
1586 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1587 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1588
1589 tree
1590 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1591 {
1592 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1593 unsigned HOST_WIDE_INT idx;
1594 tree value;
1595
1596 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1597 vec[idx] = value;
1598 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1599 vec[idx] = build_zero_cst (TREE_TYPE (type));
1600
1601 return build_vector (type, vec);
1602 }
1603
1604 /* Build a vector of type VECTYPE where all the elements are SCs. */
1605 tree
1606 build_vector_from_val (tree vectype, tree sc)
1607 {
1608 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1609
1610 if (sc == error_mark_node)
1611 return sc;
1612
1613 /* Verify that the vector type is suitable for SC. Note that there
1614 is some inconsistency in the type-system with respect to restrict
1615 qualifications of pointers. Vector types always have a main-variant
1616 element type and the qualification is applied to the vector-type.
1617 So TREE_TYPE (vector-type) does not return a properly qualified
1618 vector element-type. */
1619 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1620 TREE_TYPE (vectype)));
1621
1622 if (CONSTANT_CLASS_P (sc))
1623 {
1624 tree *v = XALLOCAVEC (tree, nunits);
1625 for (i = 0; i < nunits; ++i)
1626 v[i] = sc;
1627 return build_vector (vectype, v);
1628 }
1629 else
1630 {
1631 vec<constructor_elt, va_gc> *v;
1632 vec_alloc (v, nunits);
1633 for (i = 0; i < nunits; ++i)
1634 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1635 return build_constructor (vectype, v);
1636 }
1637 }
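
/* A minimal usage sketch (v4si stands for some 4-element integer vector
   type built elsewhere): a constant scalar yields a VECTOR_CST, while a
   non-constant scalar falls back to a CONSTRUCTOR:

     tree splat = build_vector_from_val (v4si,
					  build_int_cst (TREE_TYPE (v4si), 1));
     gcc_assert (TREE_CODE (splat) == VECTOR_CST);  */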
1638
1639 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1640 are in the vec pointed to by VALS. */
1641 tree
1642 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1643 {
1644 tree c = make_node (CONSTRUCTOR);
1645 unsigned int i;
1646 constructor_elt *elt;
1647 bool constant_p = true;
1648 bool side_effects_p = false;
1649
1650 TREE_TYPE (c) = type;
1651 CONSTRUCTOR_ELTS (c) = vals;
1652
1653 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1654 {
1655 /* Mostly ctors will have elts that don't have side-effects, so
1656 the usual case is to scan all the elements. Hence a single
1657 loop for both const and side effects, rather than one loop
1658 each (with early outs). */
1659 if (!TREE_CONSTANT (elt->value))
1660 constant_p = false;
1661 if (TREE_SIDE_EFFECTS (elt->value))
1662 side_effects_p = true;
1663 }
1664
1665 TREE_SIDE_EFFECTS (c) = side_effects_p;
1666 TREE_CONSTANT (c) = constant_p;
1667
1668 return c;
1669 }
1670
1671 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1672 INDEX and VALUE. */
1673 tree
1674 build_constructor_single (tree type, tree index, tree value)
1675 {
1676 vec<constructor_elt, va_gc> *v;
1677 constructor_elt elt = {index, value};
1678
1679 vec_alloc (v, 1);
1680 v->quick_push (elt);
1681
1682 return build_constructor (type, v);
1683 }
1684
1685
1686 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1687 are in a list pointed to by VALS. */
1688 tree
1689 build_constructor_from_list (tree type, tree vals)
1690 {
1691 tree t;
1692 vec<constructor_elt, va_gc> *v = NULL;
1693
1694 if (vals)
1695 {
1696 vec_alloc (v, list_length (vals));
1697 for (t = vals; t; t = TREE_CHAIN (t))
1698 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1699 }
1700
1701 return build_constructor (type, v);
1702 }
1703
1704 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1705 of elements, provided as index/value pairs. */
1706
1707 tree
1708 build_constructor_va (tree type, int nelts, ...)
1709 {
1710 vec<constructor_elt, va_gc> *v = NULL;
1711 va_list p;
1712
1713 va_start (p, nelts);
1714 vec_alloc (v, nelts);
1715 while (nelts--)
1716 {
1717 tree index = va_arg (p, tree);
1718 tree value = va_arg (p, tree);
1719 CONSTRUCTOR_APPEND_ELT (v, index, value);
1720 }
1721 va_end (p);
1722 return build_constructor (type, v);
1723 }
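
/* A minimal usage sketch (AGG, F0 and F1 are hypothetical: a RECORD_TYPE
   and two of its FIELD_DECLs): the NELTS index/value pairs are passed
   inline after the count:

     tree init = build_constructor_va (agg, 2,
				       f0, build_int_cst (TREE_TYPE (f0), 1),
				       f1, build_int_cst (TREE_TYPE (f1), 2));  */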
1724
1725 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1726
1727 tree
1728 build_fixed (tree type, FIXED_VALUE_TYPE f)
1729 {
1730 tree v;
1731 FIXED_VALUE_TYPE *fp;
1732
1733 v = make_node (FIXED_CST);
1734 fp = ggc_alloc_fixed_value ();
1735 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1736
1737 TREE_TYPE (v) = type;
1738 TREE_FIXED_CST_PTR (v) = fp;
1739 return v;
1740 }
1741
1742 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1743
1744 tree
1745 build_real (tree type, REAL_VALUE_TYPE d)
1746 {
1747 tree v;
1748 REAL_VALUE_TYPE *dp;
1749 int overflow = 0;
1750
1751 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1752 Consider doing it via real_convert now. */
1753
1754 v = make_node (REAL_CST);
1755 dp = ggc_alloc_real_value ();
1756 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1757
1758 TREE_TYPE (v) = type;
1759 TREE_REAL_CST_PTR (v) = dp;
1760 TREE_OVERFLOW (v) = overflow;
1761 return v;
1762 }
1763
1764 /* Return a REAL_VALUE_TYPE holding the integer value of the
1765 INTEGER_CST node I, interpreted for floating-point type TYPE. */
1766
1767 REAL_VALUE_TYPE
1768 real_value_from_int_cst (const_tree type, const_tree i)
1769 {
1770 REAL_VALUE_TYPE d;
1771
1772 /* Clear all bits of the real value type so that we can later do
1773 bitwise comparisons to see if two values are the same. */
1774 memset (&d, 0, sizeof d);
1775
1776 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
1777 wide_int (i),
1778 TYPE_SIGN (TREE_TYPE (i)));
1779 return d;
1780 }
1781
1782 /* Given a tree representing an integer constant I, return a tree
1783 representing the same value as a floating-point constant of type TYPE. */
1784
1785 tree
1786 build_real_from_int_cst (tree type, const_tree i)
1787 {
1788 tree v;
1789 int overflow = TREE_OVERFLOW (i);
1790
1791 v = build_real (type, real_value_from_int_cst (type, i));
1792
1793 TREE_OVERFLOW (v) |= overflow;
1794 return v;
1795 }
1796
1797 /* Return a newly constructed STRING_CST node whose value is
1798 the LEN characters at STR.
1799 Note that for a C string literal, LEN should include the trailing NUL.
1800 The TREE_TYPE is not initialized. */
1801
1802 tree
1803 build_string (int len, const char *str)
1804 {
1805 tree s;
1806 size_t length;
1807
1808 /* Do not waste bytes provided by padding of struct tree_string. */
1809 length = len + offsetof (struct tree_string, str) + 1;
1810
1811 record_node_allocation_statistics (STRING_CST, length);
1812
1813 s = ggc_alloc_tree_node (length);
1814
1815 memset (s, 0, sizeof (struct tree_typed));
1816 TREE_SET_CODE (s, STRING_CST);
1817 TREE_CONSTANT (s) = 1;
1818 TREE_STRING_LENGTH (s) = len;
1819 memcpy (s->string.str, str, len);
1820 s->string.str[len] = '\0';
1821
1822 return s;
1823 }
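
/* A minimal usage sketch: for the C literal "hi", LEN counts the trailing
   NUL, and the caller sets the type afterwards, e.g. as an array of char
   with index range 0..2:

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
				       build_index_type (size_int (2)));  */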
1824
1825 /* Return a newly constructed COMPLEX_CST node whose value is
1826 specified by the real and imaginary parts REAL and IMAG.
1827 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1828 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1829
1830 tree
1831 build_complex (tree type, tree real, tree imag)
1832 {
1833 tree t = make_node (COMPLEX_CST);
1834
1835 TREE_REALPART (t) = real;
1836 TREE_IMAGPART (t) = imag;
1837 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1838 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1839 return t;
1840 }
1841
1842 /* Return a constant of arithmetic type TYPE which is the
1843 multiplicative identity of the set TYPE. */
1844
1845 tree
1846 build_one_cst (tree type)
1847 {
1848 switch (TREE_CODE (type))
1849 {
1850 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1851 case POINTER_TYPE: case REFERENCE_TYPE:
1852 case OFFSET_TYPE:
1853 return build_int_cst (type, 1);
1854
1855 case REAL_TYPE:
1856 return build_real (type, dconst1);
1857
1858 case FIXED_POINT_TYPE:
1859 /* We can only generate 1 for accum types. */
1860 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1861 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1862
1863 case VECTOR_TYPE:
1864 {
1865 tree scalar = build_one_cst (TREE_TYPE (type));
1866
1867 return build_vector_from_val (type, scalar);
1868 }
1869
1870 case COMPLEX_TYPE:
1871 return build_complex (type,
1872 build_one_cst (TREE_TYPE (type)),
1873 build_zero_cst (TREE_TYPE (type)));
1874
1875 default:
1876 gcc_unreachable ();
1877 }
1878 }
1879
1880 /* Return an integer of type TYPE containing all 1's in as much precision as
1881 it contains, or a complex or vector whose subparts are such integers. */
1882
1883 tree
1884 build_all_ones_cst (tree type)
1885 {
1886 if (TREE_CODE (type) == COMPLEX_TYPE)
1887 {
1888 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1889 return build_complex (type, scalar, scalar);
1890 }
1891 else
1892 return build_minus_one_cst (type);
1893 }
1894
1895 /* Return a constant of arithmetic type TYPE which is the
1896 opposite of the multiplicative identity of the set TYPE. */
1897
1898 tree
1899 build_minus_one_cst (tree type)
1900 {
1901 switch (TREE_CODE (type))
1902 {
1903 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 return build_int_cst (type, -1);
1907
1908 case REAL_TYPE:
1909 return build_real (type, dconstm1);
1910
1911 case FIXED_POINT_TYPE:
1912 /* We can only generate 1 for accum types. */
1913 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1914 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1915 TYPE_MODE (type)));
1916
1917 case VECTOR_TYPE:
1918 {
1919 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1920
1921 return build_vector_from_val (type, scalar);
1922 }
1923
1924 case COMPLEX_TYPE:
1925 return build_complex (type,
1926 build_minus_one_cst (TREE_TYPE (type)),
1927 build_zero_cst (TREE_TYPE (type)));
1928
1929 default:
1930 gcc_unreachable ();
1931 }
1932 }
1933
1934 /* Build 0 constant of type TYPE. This is used by constructor folding
1935 and thus the constant should be represented in memory by
1936 zero(es). */
1937
1938 tree
1939 build_zero_cst (tree type)
1940 {
1941 switch (TREE_CODE (type))
1942 {
1943 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1944 case POINTER_TYPE: case REFERENCE_TYPE:
1945 case OFFSET_TYPE: case NULLPTR_TYPE:
1946 return build_int_cst (type, 0);
1947
1948 case REAL_TYPE:
1949 return build_real (type, dconst0);
1950
1951 case FIXED_POINT_TYPE:
1952 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1953
1954 case VECTOR_TYPE:
1955 {
1956 tree scalar = build_zero_cst (TREE_TYPE (type));
1957
1958 return build_vector_from_val (type, scalar);
1959 }
1960
1961 case COMPLEX_TYPE:
1962 {
1963 tree zero = build_zero_cst (TREE_TYPE (type));
1964
1965 return build_complex (type, zero, zero);
1966 }
1967
1968 default:
1969 if (!AGGREGATE_TYPE_P (type))
1970 return fold_convert (type, integer_zero_node);
1971 return build_constructor (type, NULL);
1972 }
1973 }
1974
1975
1976 /* Build a BINFO with room for BASE_BINFOS base binfo slots. */
1977
1978 tree
1979 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
1980 {
1981 tree t;
1982 size_t length = (offsetof (struct tree_binfo, base_binfos)
1983 + vec<tree, va_gc>::embedded_size (base_binfos));
1984
1985 record_node_allocation_statistics (TREE_BINFO, length);
1986
1987 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1988
1989 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
1990
1991 TREE_SET_CODE (t, TREE_BINFO);
1992
1993 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
1994
1995 return t;
1996 }
1997
1998 /* Create a CASE_LABEL_EXPR tree node and return it. */
1999
2000 tree
2001 build_case_label (tree low_value, tree high_value, tree label_decl)
2002 {
2003 tree t = make_node (CASE_LABEL_EXPR);
2004
2005 TREE_TYPE (t) = void_type_node;
2006 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2007
2008 CASE_LOW (t) = low_value;
2009 CASE_HIGH (t) = high_value;
2010 CASE_LABEL (t) = label_decl;
2011 CASE_CHAIN (t) = NULL_TREE;
2012
2013 return t;
2014 }
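
/* A minimal usage sketch, assuming integer_type_node is available: a label
   decl built with build_decl carries a source location, which
   build_case_label copies onto the CASE_LABEL_EXPR.

     tree label = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
                              NULL_TREE, void_type_node);
     tree low = build_int_cst (integer_type_node, 3);
     tree case_3 = build_case_label (low, NULL_TREE, label);

   A NULL_TREE HIGH_VALUE gives a single-value case; NULL_TREE for both
   LOW_VALUE and HIGH_VALUE is how the default label is represented. */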
2015
2016 /* Build a newly constructed INTEGER_CST node of length LEN. */
2017
2018 tree
2019 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2020 {
2021 tree t;
2022 int length = (ext_len - 1) * sizeof (tree) + sizeof (struct tree_int_cst);
2023
2024 gcc_assert (len);
2025 record_node_allocation_statistics (INTEGER_CST, length);
2026
2027 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2028
2029 TREE_SET_CODE (t, INTEGER_CST);
2030 TREE_INT_CST_NUNITS (t) = len;
2031 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2032
2033 TREE_CONSTANT (t) = 1;
2034
2035 return t;
2036 }
2037
2038 /* Build a newly constructed TREE_VEC node of length LEN. */
2039
2040 tree
2041 make_tree_vec_stat (int len MEM_STAT_DECL)
2042 {
2043 tree t;
2044 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2045
2046 record_node_allocation_statistics (TREE_VEC, length);
2047
2048 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2049
2050 TREE_SET_CODE (t, TREE_VEC);
2051 TREE_VEC_LENGTH (t) = len;
2052
2053 return t;
2054 }
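
/* A minimal usage sketch: callers normally reach this through the
   make_tree_vec macro, which supplies the memory-statistics argument, and
   then fill the slots directly.

     tree vec = make_tree_vec (2);
     TREE_VEC_ELT (vec, 0) = integer_zero_node;
     TREE_VEC_ELT (vec, 1) = integer_one_node;

   The node is allocated cleared, so unfilled slots read as NULL_TREE. */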
2055 \f
2056 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2057 constant all of whose elements are zero. */
2058
2059 int
2060 integer_zerop (const_tree expr)
2061 {
2062 STRIP_NOPS (expr);
2063
2064 switch (TREE_CODE (expr))
2065 {
2066 case INTEGER_CST:
2067 return wi::eq_p (expr, 0);
2068 case COMPLEX_CST:
2069 return (integer_zerop (TREE_REALPART (expr))
2070 && integer_zerop (TREE_IMAGPART (expr)));
2071 case VECTOR_CST:
2072 {
2073 unsigned i;
2074 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2075 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2076 return false;
2077 return true;
2078 }
2079 default:
2080 return false;
2081 }
2082 }
2083
2084 /* Return 1 if EXPR is the integer constant one, the corresponding
2085 complex constant, or a vector constant all of whose elements are one. */
2086
2087 int
2088 integer_onep (const_tree expr)
2089 {
2090 STRIP_NOPS (expr);
2091
2092 switch (TREE_CODE (expr))
2093 {
2094 case INTEGER_CST:
2095 return wi::eq_p (expr, 1);
2096 case COMPLEX_CST:
2097 return (integer_onep (TREE_REALPART (expr))
2098 && integer_zerop (TREE_IMAGPART (expr)));
2099 case VECTOR_CST:
2100 {
2101 unsigned i;
2102 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2103 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2104 return false;
2105 return true;
2106 }
2107 default:
2108 return false;
2109 }
2110 }
2111
2112 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2113 it contains, or a complex or vector whose subparts are such integers. */
2114
2115 int
2116 integer_all_onesp (const_tree expr)
2117 {
2118 STRIP_NOPS (expr);
2119
2120 if (TREE_CODE (expr) == COMPLEX_CST
2121 && integer_all_onesp (TREE_REALPART (expr))
2122 && integer_all_onesp (TREE_IMAGPART (expr)))
2123 return 1;
2124
2125 else if (TREE_CODE (expr) == VECTOR_CST)
2126 {
2127 unsigned i;
2128 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2129 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2130 return 0;
2131 return 1;
2132 }
2133
2134 else if (TREE_CODE (expr) != INTEGER_CST)
2135 return 0;
2136
2137 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2138 }
2139
2140 /* Return 1 if EXPR is the integer constant minus one. */
2141
2142 int
2143 integer_minus_onep (const_tree expr)
2144 {
2145 STRIP_NOPS (expr);
2146
2147 if (TREE_CODE (expr) == COMPLEX_CST)
2148 return (integer_all_onesp (TREE_REALPART (expr))
2149 && integer_zerop (TREE_IMAGPART (expr)));
2150 else
2151 return integer_all_onesp (expr);
2152 }
2153
2154 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2155 one bit on). */
2156
2157 int
2158 integer_pow2p (const_tree expr)
2159 {
2160 STRIP_NOPS (expr);
2161
2162 if (TREE_CODE (expr) == COMPLEX_CST
2163 && integer_pow2p (TREE_REALPART (expr))
2164 && integer_zerop (TREE_IMAGPART (expr)))
2165 return 1;
2166
2167 if (TREE_CODE (expr) != INTEGER_CST)
2168 return 0;
2169
2170 return wi::popcount (expr) == 1;
2171 }
2172
2173 /* Return 1 if EXPR is an integer constant other than zero or a
2174 complex constant other than zero. */
2175
2176 int
2177 integer_nonzerop (const_tree expr)
2178 {
2179 STRIP_NOPS (expr);
2180
2181 return ((TREE_CODE (expr) == INTEGER_CST
2182 && !wi::eq_p (expr, 0))
2183 || (TREE_CODE (expr) == COMPLEX_CST
2184 && (integer_nonzerop (TREE_REALPART (expr))
2185 || integer_nonzerop (TREE_IMAGPART (expr)))));
2186 }
2187
2188 /* Return 1 if EXPR is the fixed-point constant zero. */
2189
2190 int
2191 fixed_zerop (const_tree expr)
2192 {
2193 return (TREE_CODE (expr) == FIXED_CST
2194 && TREE_FIXED_CST (expr).data.is_zero ());
2195 }
2196
2197 /* Return the power of two represented by a tree node known to be a
2198 power of two. */
2199
2200 int
2201 tree_log2 (const_tree expr)
2202 {
2203 STRIP_NOPS (expr);
2204
2205 if (TREE_CODE (expr) == COMPLEX_CST)
2206 return tree_log2 (TREE_REALPART (expr));
2207
2208 return wi::exact_log2 (expr);
2209 }
2210
2211 /* Similar, but return the largest integer Y such that 2 ** Y is less
2212 than or equal to EXPR. */
2213
2214 int
2215 tree_floor_log2 (const_tree expr)
2216 {
2217 STRIP_NOPS (expr);
2218
2219 if (TREE_CODE (expr) == COMPLEX_CST)
2220 return tree_log2 (TREE_REALPART (expr));
2221
2222 return wi::floor_log2 (expr);
2223 }
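
/* A minimal usage sketch: integer_pow2p and tree_log2 compose in the usual
   way when a multiplication by a constant power of two is turned into a
   shift, assuming EXPR is the INTEGER_CST multiplier.

     if (integer_pow2p (expr))
       {
         int shift = tree_log2 (expr);
         ... emit a left shift by SHIFT bits instead of the multiply ...
       }

   tree_floor_log2 differs only in rounding down for values that are not
   exact powers of two. */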
2224
2225 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2226 decimal float constants, so don't return 1 for them. */
2227
2228 int
2229 real_zerop (const_tree expr)
2230 {
2231 STRIP_NOPS (expr);
2232
2233 switch (TREE_CODE (expr))
2234 {
2235 case REAL_CST:
2236 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2237 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2238 case COMPLEX_CST:
2239 return real_zerop (TREE_REALPART (expr))
2240 && real_zerop (TREE_IMAGPART (expr));
2241 case VECTOR_CST:
2242 {
2243 unsigned i;
2244 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2245 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2246 return false;
2247 return true;
2248 }
2249 default:
2250 return false;
2251 }
2252 }
2253
2254 /* Return 1 if EXPR is the real constant one in real or complex form.
2255 Trailing zeroes matter for decimal float constants, so don't return
2256 1 for them. */
2257
2258 int
2259 real_onep (const_tree expr)
2260 {
2261 STRIP_NOPS (expr);
2262
2263 switch (TREE_CODE (expr))
2264 {
2265 case REAL_CST:
2266 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2267 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2268 case COMPLEX_CST:
2269 return real_onep (TREE_REALPART (expr))
2270 && real_zerop (TREE_IMAGPART (expr));
2271 case VECTOR_CST:
2272 {
2273 unsigned i;
2274 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2275 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2276 return false;
2277 return true;
2278 }
2279 default:
2280 return false;
2281 }
2282 }
2283
2284 /* Return 1 if EXPR is the real constant two. Trailing zeroes matter
2285 for decimal float constants, so don't return 1 for them. */
2286
2287 int
2288 real_twop (const_tree expr)
2289 {
2290 STRIP_NOPS (expr);
2291
2292 switch (TREE_CODE (expr))
2293 {
2294 case REAL_CST:
2295 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2)
2296 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2297 case COMPLEX_CST:
2298 return real_twop (TREE_REALPART (expr))
2299 && real_zerop (TREE_IMAGPART (expr));
2300 case VECTOR_CST:
2301 {
2302 unsigned i;
2303 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2304 if (!real_twop (VECTOR_CST_ELT (expr, i)))
2305 return false;
2306 return true;
2307 }
2308 default:
2309 return false;
2310 }
2311 }
2312
2313 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2314 matter for decimal float constants, so don't return 1 for them. */
2315
2316 int
2317 real_minus_onep (const_tree expr)
2318 {
2319 STRIP_NOPS (expr);
2320
2321 switch (TREE_CODE (expr))
2322 {
2323 case REAL_CST:
2324 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2325 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2326 case COMPLEX_CST:
2327 return real_minus_onep (TREE_REALPART (expr))
2328 && real_zerop (TREE_IMAGPART (expr));
2329 case VECTOR_CST:
2330 {
2331 unsigned i;
2332 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2333 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2334 return false;
2335 return true;
2336 }
2337 default:
2338 return false;
2339 }
2340 }
2341
2342 /* Nonzero if EXP is a constant or a cast of a constant. */
2343
2344 int
2345 really_constant_p (const_tree exp)
2346 {
2347 /* This is not quite the same as STRIP_NOPS. It does more. */
2348 while (CONVERT_EXPR_P (exp)
2349 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2350 exp = TREE_OPERAND (exp, 0);
2351 return TREE_CONSTANT (exp);
2352 }
2353 \f
2354 /* Return first list element whose TREE_VALUE is ELEM.
2355 Return 0 if ELEM is not in LIST. */
2356
2357 tree
2358 value_member (tree elem, tree list)
2359 {
2360 while (list)
2361 {
2362 if (elem == TREE_VALUE (list))
2363 return list;
2364 list = TREE_CHAIN (list);
2365 }
2366 return NULL_TREE;
2367 }
2368
2369 /* Return first list element whose TREE_PURPOSE is ELEM.
2370 Return 0 if ELEM is not in LIST. */
2371
2372 tree
2373 purpose_member (const_tree elem, tree list)
2374 {
2375 while (list)
2376 {
2377 if (elem == TREE_PURPOSE (list))
2378 return list;
2379 list = TREE_CHAIN (list);
2380 }
2381 return NULL_TREE;
2382 }
2383
2384 /* Return true if ELEM is in V. */
2385
2386 bool
2387 vec_member (const_tree elem, vec<tree, va_gc> *v)
2388 {
2389 unsigned ix;
2390 tree t;
2391 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2392 if (elem == t)
2393 return true;
2394 return false;
2395 }
2396
2397 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2398 NULL_TREE. */
2399
2400 tree
2401 chain_index (int idx, tree chain)
2402 {
2403 for (; chain && idx > 0; --idx)
2404 chain = TREE_CHAIN (chain);
2405 return chain;
2406 }
2407
2408 /* Return nonzero if ELEM is part of the chain CHAIN. */
2409
2410 int
2411 chain_member (const_tree elem, const_tree chain)
2412 {
2413 while (chain)
2414 {
2415 if (elem == chain)
2416 return 1;
2417 chain = DECL_CHAIN (chain);
2418 }
2419
2420 return 0;
2421 }
2422
2423 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2424 We expect a null pointer to mark the end of the chain.
2425 This is the Lisp primitive `length'. */
2426
2427 int
2428 list_length (const_tree t)
2429 {
2430 const_tree p = t;
2431 #ifdef ENABLE_TREE_CHECKING
2432 const_tree q = t;
2433 #endif
2434 int len = 0;
2435
2436 while (p)
2437 {
2438 p = TREE_CHAIN (p);
2439 #ifdef ENABLE_TREE_CHECKING
2440 if (len % 2)
2441 q = TREE_CHAIN (q);
2442 gcc_assert (p != q);
2443 #endif
2444 len++;
2445 }
2446
2447 return len;
2448 }
2449
2450 /* Returns the number of FIELD_DECLs in TYPE. */
2451
2452 int
2453 fields_length (const_tree type)
2454 {
2455 tree t = TYPE_FIELDS (type);
2456 int count = 0;
2457
2458 for (; t; t = DECL_CHAIN (t))
2459 if (TREE_CODE (t) == FIELD_DECL)
2460 ++count;
2461
2462 return count;
2463 }
2464
2465 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2466 UNION_TYPE TYPE, or NULL_TREE if none. */
2467
2468 tree
2469 first_field (const_tree type)
2470 {
2471 tree t = TYPE_FIELDS (type);
2472 while (t && TREE_CODE (t) != FIELD_DECL)
2473 t = TREE_CHAIN (t);
2474 return t;
2475 }
2476
2477 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2478 by modifying the last node in chain 1 to point to chain 2.
2479 This is the Lisp primitive `nconc'. */
2480
2481 tree
2482 chainon (tree op1, tree op2)
2483 {
2484 tree t1;
2485
2486 if (!op1)
2487 return op2;
2488 if (!op2)
2489 return op1;
2490
2491 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2492 continue;
2493 TREE_CHAIN (t1) = op2;
2494
2495 #ifdef ENABLE_TREE_CHECKING
2496 {
2497 tree t2;
2498 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2499 gcc_assert (t2 != t1);
2500 }
2501 #endif
2502
2503 return op1;
2504 }
2505
2506 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2507
2508 tree
2509 tree_last (tree chain)
2510 {
2511 tree next;
2512 if (chain)
2513 while ((next = TREE_CHAIN (chain)))
2514 chain = next;
2515 return chain;
2516 }
2517
2518 /* Reverse the order of elements in the chain T,
2519 and return the new head of the chain (old last element). */
2520
2521 tree
2522 nreverse (tree t)
2523 {
2524 tree prev = 0, decl, next;
2525 for (decl = t; decl; decl = next)
2526 {
2527 /* We shouldn't be using this function to reverse BLOCK chains; we
2528 have blocks_nreverse for that. */
2529 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2530 next = TREE_CHAIN (decl);
2531 TREE_CHAIN (decl) = prev;
2532 prev = decl;
2533 }
2534 return prev;
2535 }
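
/* A minimal usage sketch of the chain primitives above, assuming D1 and D2
   are single, unchained nodes whose TREE_CHAIN fields are free for this
   purpose.

     tree chain = chainon (d1, d2);
     gcc_assert (list_length (chain) == 2);
     chain = nreverse (chain);

   After the nreverse call D2 heads the chain. Like the Lisp primitives
   they are named after, chainon and nreverse modify the chain in place
   rather than copying it. */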
2536 \f
2537 /* Return a newly created TREE_LIST node whose
2538 purpose and value fields are PARM and VALUE. */
2539
2540 tree
2541 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2542 {
2543 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2544 TREE_PURPOSE (t) = parm;
2545 TREE_VALUE (t) = value;
2546 return t;
2547 }
2548
2549 /* Build a chain of TREE_LIST nodes from a vector. */
2550
2551 tree
2552 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2553 {
2554 tree ret = NULL_TREE;
2555 tree *pp = &ret;
2556 unsigned int i;
2557 tree t;
2558 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2559 {
2560 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2561 pp = &TREE_CHAIN (*pp);
2562 }
2563 return ret;
2564 }
2565
2566 /* Return a newly created TREE_LIST node whose
2567 purpose and value fields are PURPOSE and VALUE
2568 and whose TREE_CHAIN is CHAIN. */
2569
2570 tree
2571 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2572 {
2573 tree node;
2574
2575 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2576 memset (node, 0, sizeof (struct tree_common));
2577
2578 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2579
2580 TREE_SET_CODE (node, TREE_LIST);
2581 TREE_CHAIN (node) = chain;
2582 TREE_PURPOSE (node) = purpose;
2583 TREE_VALUE (node) = value;
2584 return node;
2585 }
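
/* A minimal usage sketch: the TREE_LIST constructors are commonly used to
   build association lists, for instance an attribute list in which each
   node pairs a name (TREE_PURPOSE) with its arguments (TREE_VALUE).
   Assuming the identifier table is initialized:

     tree args = build_tree_list (NULL_TREE, integer_one_node);
     tree attr = tree_cons (get_identifier ("aligned"), args, NULL_TREE);

   Both build_tree_list and tree_cons are reached through macros that pass
   the memory-statistics arguments to the _stat functions above. */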
2586
2587 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2588 trees. */
2589
2590 vec<tree, va_gc> *
2591 ctor_to_vec (tree ctor)
2592 {
2593 vec<tree, va_gc> *vec;
2594 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2595 unsigned int ix;
2596 tree val;
2597
2598 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2599 vec->quick_push (val);
2600
2601 return vec;
2602 }
2603 \f
2604 /* Return the size nominally occupied by an object of type TYPE
2605 when it resides in memory. The value is measured in units of bytes,
2606 and its data type is that normally used for type sizes
2607 (which is the first type created by make_signed_type or
2608 make_unsigned_type). */
2609
2610 tree
2611 size_in_bytes (const_tree type)
2612 {
2613 tree t;
2614
2615 if (type == error_mark_node)
2616 return integer_zero_node;
2617
2618 type = TYPE_MAIN_VARIANT (type);
2619 t = TYPE_SIZE_UNIT (type);
2620
2621 if (t == 0)
2622 {
2623 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2624 return size_zero_node;
2625 }
2626
2627 return t;
2628 }
2629
2630 /* Return the size of TYPE (in bytes) as a wide integer
2631 or return -1 if the size can vary or is larger than an integer. */
2632
2633 HOST_WIDE_INT
2634 int_size_in_bytes (const_tree type)
2635 {
2636 tree t;
2637
2638 if (type == error_mark_node)
2639 return 0;
2640
2641 type = TYPE_MAIN_VARIANT (type);
2642 t = TYPE_SIZE_UNIT (type);
2643
2644 if (t && cst_fits_uhwi_p (t))
2645 return tree_to_hwi (t);
2646 else
2647 return -1;
2648 }
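
/* A minimal usage sketch: int_size_in_bytes is the convenient form when a
   host integer is wanted, while size_in_bytes returns a tree and can
   therefore represent variable sizes.

     HOST_WIDE_INT sz = int_size_in_bytes (type);
     if (sz == -1)
       {
         tree sz_tree = size_in_bytes (type);
         ... the size is variable or does not fit in a HOST_WIDE_INT ...
       }

   Callers that work on trees throughout can simply use size_in_bytes. */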
2649
2650 /* Return the maximum size of TYPE (in bytes) as a wide integer
2651 or return -1 if the size can vary or is larger than an integer. */
2652
2653 HOST_WIDE_INT
2654 max_int_size_in_bytes (const_tree type)
2655 {
2656 HOST_WIDE_INT size = -1;
2657 tree size_tree;
2658
2659 /* If this is an array type, check for a possible MAX_SIZE attached. */
2660
2661 if (TREE_CODE (type) == ARRAY_TYPE)
2662 {
2663 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2664
2665 if (size_tree && tree_fits_uhwi_p (size_tree))
2666 size = tree_to_uhwi (size_tree);
2667 }
2668
2669 /* If we still haven't been able to get a size, see if the language
2670 can compute a maximum size. */
2671
2672 if (size == -1)
2673 {
2674 size_tree = lang_hooks.types.max_size (type);
2675
2676 if (size_tree && tree_fits_uhwi_p (size_tree))
2677 size = tree_to_uhwi (size_tree);
2678 }
2679
2680 return size;
2681 }
2682
2683 /* Returns a tree for the size of EXP in bytes. */
2684
2685 tree
2686 tree_expr_size (const_tree exp)
2687 {
2688 if (DECL_P (exp)
2689 && DECL_SIZE_UNIT (exp) != 0)
2690 return DECL_SIZE_UNIT (exp);
2691 else
2692 return size_in_bytes (TREE_TYPE (exp));
2693 }
2694 \f
2695 /* Return the bit position of FIELD, in bits from the start of the record.
2696 This is a tree of type bitsizetype. */
2697
2698 tree
2699 bit_position (const_tree field)
2700 {
2701 return bit_from_pos (DECL_FIELD_OFFSET (field),
2702 DECL_FIELD_BIT_OFFSET (field));
2703 }
2704
2705 /* Likewise, but return as an integer. It must be representable in
2706 that way (since it could be a signed value, we don't have the
2707 option of returning -1 like int_size_in_bytes can). */
2708
2709 HOST_WIDE_INT
2710 int_bit_position (const_tree field)
2711 {
2712 return tree_to_shwi (bit_position (field));
2713 }
2714 \f
2715 /* Return the byte position of FIELD, in bytes from the start of the record.
2716 This is a tree of type sizetype. */
2717
2718 tree
2719 byte_position (const_tree field)
2720 {
2721 return byte_from_pos (DECL_FIELD_OFFSET (field),
2722 DECL_FIELD_BIT_OFFSET (field));
2723 }
2724
2725 /* Likewise, but return as an integer. It must be representable in
2726 that way (since it could be a signed value, we don't have the
2727 option of returning -1 like int_size_in_bytes can). */
2728
2729 HOST_WIDE_INT
2730 int_byte_position (const_tree field)
2731 {
2732 return tree_to_shwi (byte_position (field));
2733 }
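
/* A minimal usage sketch, assuming FIELD is a FIELD_DECL of a record type
   that has already been laid out and whose position fits a HOST_WIDE_INT:

     HOST_WIDE_INT bitpos = int_bit_position (field);
     HOST_WIDE_INT bytepos = int_byte_position (field);

   For a field that is not a bit-field, BITPOS is exactly
   BYTEPOS * BITS_PER_UNIT. */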
2734 \f
2735 /* Return the strictest alignment, in bits, that T is known to have. */
2736
2737 unsigned int
2738 expr_align (const_tree t)
2739 {
2740 unsigned int align0, align1;
2741
2742 switch (TREE_CODE (t))
2743 {
2744 CASE_CONVERT: case NON_LVALUE_EXPR:
2745 /* If we have conversions, we know that the alignment of the
2746 object must meet each of the alignments of the types. */
2747 align0 = expr_align (TREE_OPERAND (t, 0));
2748 align1 = TYPE_ALIGN (TREE_TYPE (t));
2749 return MAX (align0, align1);
2750
2751 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2752 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2753 case CLEANUP_POINT_EXPR:
2754 /* These don't change the alignment of an object. */
2755 return expr_align (TREE_OPERAND (t, 0));
2756
2757 case COND_EXPR:
2758 /* The best we can do is say that the alignment is the least aligned
2759 of the two arms. */
2760 align0 = expr_align (TREE_OPERAND (t, 1));
2761 align1 = expr_align (TREE_OPERAND (t, 2));
2762 return MIN (align0, align1);
2763
2764 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2765 meaningfully, it's always 1. */
2766 case LABEL_DECL: case CONST_DECL:
2767 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2768 case FUNCTION_DECL:
2769 gcc_assert (DECL_ALIGN (t) != 0);
2770 return DECL_ALIGN (t);
2771
2772 default:
2773 break;
2774 }
2775
2776 /* Otherwise take the alignment from that of the type. */
2777 return TYPE_ALIGN (TREE_TYPE (t));
2778 }
2779 \f
2780 /* Return, as a tree node, the number of elements for TYPE (which is an
2781 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2782
2783 tree
2784 array_type_nelts (const_tree type)
2785 {
2786 tree index_type, min, max;
2787
2788 /* If they did it with unspecified bounds, then we should have already
2789 given an error about it before we got here. */
2790 if (! TYPE_DOMAIN (type))
2791 return error_mark_node;
2792
2793 index_type = TYPE_DOMAIN (type);
2794 min = TYPE_MIN_VALUE (index_type);
2795 max = TYPE_MAX_VALUE (index_type);
2796
2797 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2798 if (!max)
2799 return error_mark_node;
2800
2801 return (integer_zerop (min)
2802 ? max
2803 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2804 }
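
/* A minimal usage sketch: for a C array type such as int[10], whose domain
   is [0, 9], the function above returns the INTEGER_CST 9, so the element
   count itself is obtained by adding one back in the index type.

     tree n_minus_1 = array_type_nelts (atype);
     tree nelts = fold_build2 (PLUS_EXPR, TREE_TYPE (n_minus_1), n_minus_1,
                               build_int_cst (TREE_TYPE (n_minus_1), 1));

   ATYPE here stands for any complete ARRAY_TYPE with a known upper bound. */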
2805 \f
2806 /* If arg is static -- a reference to an object in static storage -- then
2807 return the object. This is not the same as the C meaning of `static'.
2808 If arg isn't static, return NULL. */
2809
2810 tree
2811 staticp (tree arg)
2812 {
2813 switch (TREE_CODE (arg))
2814 {
2815 case FUNCTION_DECL:
2816 /* Nested functions are static, even though taking their address will
2817 involve a trampoline as we unnest the nested function and create
2818 the trampoline on the tree level. */
2819 return arg;
2820
2821 case VAR_DECL:
2822 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2823 && ! DECL_THREAD_LOCAL_P (arg)
2824 && ! DECL_DLLIMPORT_P (arg)
2825 ? arg : NULL);
2826
2827 case CONST_DECL:
2828 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2829 ? arg : NULL);
2830
2831 case CONSTRUCTOR:
2832 return TREE_STATIC (arg) ? arg : NULL;
2833
2834 case LABEL_DECL:
2835 case STRING_CST:
2836 return arg;
2837
2838 case COMPONENT_REF:
2839 /* If the thing being referenced is not a field, then it is
2840 something language specific. */
2841 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2842
2843 /* If we are referencing a bitfield, we can't evaluate an
2844 ADDR_EXPR at compile time and so it isn't a constant. */
2845 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2846 return NULL;
2847
2848 return staticp (TREE_OPERAND (arg, 0));
2849
2850 case BIT_FIELD_REF:
2851 return NULL;
2852
2853 case INDIRECT_REF:
2854 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2855
2856 case ARRAY_REF:
2857 case ARRAY_RANGE_REF:
2858 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2859 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2860 return staticp (TREE_OPERAND (arg, 0));
2861 else
2862 return NULL;
2863
2864 case COMPOUND_LITERAL_EXPR:
2865 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2866
2867 default:
2868 return NULL;
2869 }
2870 }
2871
2872 \f
2873
2874
2875 /* Return whether OP is a DECL whose address is function-invariant. */
2876
2877 bool
2878 decl_address_invariant_p (const_tree op)
2879 {
2880 /* The conditions below are slightly less strict than the one in
2881 staticp. */
2882
2883 switch (TREE_CODE (op))
2884 {
2885 case PARM_DECL:
2886 case RESULT_DECL:
2887 case LABEL_DECL:
2888 case FUNCTION_DECL:
2889 return true;
2890
2891 case VAR_DECL:
2892 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2893 || DECL_THREAD_LOCAL_P (op)
2894 || DECL_CONTEXT (op) == current_function_decl
2895 || decl_function_context (op) == current_function_decl)
2896 return true;
2897 break;
2898
2899 case CONST_DECL:
2900 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2901 || decl_function_context (op) == current_function_decl)
2902 return true;
2903 break;
2904
2905 default:
2906 break;
2907 }
2908
2909 return false;
2910 }
2911
2912 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
2913
2914 bool
2915 decl_address_ip_invariant_p (const_tree op)
2916 {
2917 /* The conditions below are slightly less strict than the one in
2918 staticp. */
2919
2920 switch (TREE_CODE (op))
2921 {
2922 case LABEL_DECL:
2923 case FUNCTION_DECL:
2924 case STRING_CST:
2925 return true;
2926
2927 case VAR_DECL:
2928 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
2929 && !DECL_DLLIMPORT_P (op))
2930 || DECL_THREAD_LOCAL_P (op))
2931 return true;
2932 break;
2933
2934 case CONST_DECL:
2935 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
2936 return true;
2937 break;
2938
2939 default:
2940 break;
2941 }
2942
2943 return false;
2944 }
2945
2946
2947 /* Return true if T is function-invariant (internal function, does
2948 not handle arithmetic; that's handled in skip_simple_arithmetic and
2949 tree_invariant_p). */
2950
2951 static bool tree_invariant_p (tree t);
2952
2953 static bool
2954 tree_invariant_p_1 (tree t)
2955 {
2956 tree op;
2957
2958 if (TREE_CONSTANT (t)
2959 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
2960 return true;
2961
2962 switch (TREE_CODE (t))
2963 {
2964 case SAVE_EXPR:
2965 return true;
2966
2967 case ADDR_EXPR:
2968 op = TREE_OPERAND (t, 0);
2969 while (handled_component_p (op))
2970 {
2971 switch (TREE_CODE (op))
2972 {
2973 case ARRAY_REF:
2974 case ARRAY_RANGE_REF:
2975 if (!tree_invariant_p (TREE_OPERAND (op, 1))
2976 || TREE_OPERAND (op, 2) != NULL_TREE
2977 || TREE_OPERAND (op, 3) != NULL_TREE)
2978 return false;
2979 break;
2980
2981 case COMPONENT_REF:
2982 if (TREE_OPERAND (op, 2) != NULL_TREE)
2983 return false;
2984 break;
2985
2986 default:;
2987 }
2988 op = TREE_OPERAND (op, 0);
2989 }
2990
2991 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2992
2993 default:
2994 break;
2995 }
2996
2997 return false;
2998 }
2999
3000 /* Return true if T is function-invariant. */
3001
3002 static bool
3003 tree_invariant_p (tree t)
3004 {
3005 tree inner = skip_simple_arithmetic (t);
3006 return tree_invariant_p_1 (inner);
3007 }
3008
3009 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3010 Do this to any expression which may be used in more than one place,
3011 but must be evaluated only once.
3012
3013 Normally, expand_expr would reevaluate the expression each time.
3014 Calling save_expr produces something that is evaluated and recorded
3015 the first time expand_expr is called on it. Subsequent calls to
3016 expand_expr just reuse the recorded value.
3017
3018 The call to expand_expr that generates code that actually computes
3019 the value is the first call *at compile time*. Subsequent calls
3020 *at compile time* generate code to use the saved value.
3021 This produces the correct result provided that *at run time* control
3022 always flows through the insns made by the first expand_expr
3023 before reaching the other places where the save_expr was evaluated.
3024 You, the caller of save_expr, must make sure this is so.
3025
3026 Constants, and certain read-only nodes, are returned with no
3027 SAVE_EXPR because that is safe. Expressions containing placeholders
3028 are not touched; see tree.def for an explanation of what these
3029 are used for. */
3030
3031 tree
3032 save_expr (tree expr)
3033 {
3034 tree t = fold (expr);
3035 tree inner;
3036
3037 /* If the tree evaluates to a constant, then we don't want to hide that
3038 fact (i.e. this allows further folding, and direct checks for constants).
3039 However, a read-only object that has side effects cannot be bypassed.
3040 Since it is no problem to reevaluate literals, we just return the
3041 literal node. */
3042 inner = skip_simple_arithmetic (t);
3043 if (TREE_CODE (inner) == ERROR_MARK)
3044 return inner;
3045
3046 if (tree_invariant_p_1 (inner))
3047 return t;
3048
3049 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3050 it means that the size or offset of some field of an object depends on
3051 the value within another field.
3052
3053 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3054 and some variable since it would then need to be both evaluated once and
3055 evaluated more than once. Front-ends must ensure this case cannot
3056 happen by surrounding any such subexpressions in their own SAVE_EXPR
3057 and forcing evaluation at the proper time. */
3058 if (contains_placeholder_p (inner))
3059 return t;
3060
3061 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3062 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3063
3064 /* This expression might be placed ahead of a jump to ensure that the
3065 value was computed on both sides of the jump. So make sure it isn't
3066 eliminated as dead. */
3067 TREE_SIDE_EFFECTS (t) = 1;
3068 return t;
3069 }
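
/* A minimal usage sketch: when building a MAX-like expression from
   front-end trees, each operand may be referenced twice, so it is wrapped
   first (assuming A and B are valid trees of arithmetic type TYPE).

     a = save_expr (a);
     b = save_expr (b);
     tree cond = fold_build2 (GT_EXPR, boolean_type_node, a, b);
     tree max = fold_build3 (COND_EXPR, type, cond, a, b);

   Because the condition and both arms reference the same SAVE_EXPR nodes,
   A and B are evaluated only once at run time. */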
3070
3071 /* Look inside EXPR into any simple arithmetic operations. Return the
3072 outermost non-arithmetic or non-invariant node. */
3073
3074 tree
3075 skip_simple_arithmetic (tree expr)
3076 {
3077 /* We don't care about whether this can be used as an lvalue in this
3078 context. */
3079 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3080 expr = TREE_OPERAND (expr, 0);
3081
3082 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3083 a constant, it will be more efficient to not make another SAVE_EXPR since
3084 it will allow better simplification and GCSE will be able to merge the
3085 computations if they actually occur. */
3086 while (true)
3087 {
3088 if (UNARY_CLASS_P (expr))
3089 expr = TREE_OPERAND (expr, 0);
3090 else if (BINARY_CLASS_P (expr))
3091 {
3092 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3093 expr = TREE_OPERAND (expr, 0);
3094 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3095 expr = TREE_OPERAND (expr, 1);
3096 else
3097 break;
3098 }
3099 else
3100 break;
3101 }
3102
3103 return expr;
3104 }
3105
3106 /* Look inside EXPR into simple arithmetic operations involving constants.
3107 Return the outermost non-arithmetic or non-constant node. */
3108
3109 tree
3110 skip_simple_constant_arithmetic (tree expr)
3111 {
3112 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3113 expr = TREE_OPERAND (expr, 0);
3114
3115 while (true)
3116 {
3117 if (UNARY_CLASS_P (expr))
3118 expr = TREE_OPERAND (expr, 0);
3119 else if (BINARY_CLASS_P (expr))
3120 {
3121 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3122 expr = TREE_OPERAND (expr, 0);
3123 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3124 expr = TREE_OPERAND (expr, 1);
3125 else
3126 break;
3127 }
3128 else
3129 break;
3130 }
3131
3132 return expr;
3133 }
3134
3135 /* Return which tree structure is used by T. */
3136
3137 enum tree_node_structure_enum
3138 tree_node_structure (const_tree t)
3139 {
3140 const enum tree_code code = TREE_CODE (t);
3141 return tree_node_structure_for_code (code);
3142 }
3143
3144 /* Set various status flags when building a CALL_EXPR object T. */
3145
3146 static void
3147 process_call_operands (tree t)
3148 {
3149 bool side_effects = TREE_SIDE_EFFECTS (t);
3150 bool read_only = false;
3151 int i = call_expr_flags (t);
3152
3153 /* Calls have side-effects, except those to const or pure functions. */
3154 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3155 side_effects = true;
3156 /* Propagate TREE_READONLY of arguments for const functions. */
3157 if (i & ECF_CONST)
3158 read_only = true;
3159
3160 if (!side_effects || read_only)
3161 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3162 {
3163 tree op = TREE_OPERAND (t, i);
3164 if (op && TREE_SIDE_EFFECTS (op))
3165 side_effects = true;
3166 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3167 read_only = false;
3168 }
3169
3170 TREE_SIDE_EFFECTS (t) = side_effects;
3171 TREE_READONLY (t) = read_only;
3172 }
3173 \f
3174 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3175 size or offset that depends on a field within a record. */
3176
3177 bool
3178 contains_placeholder_p (const_tree exp)
3179 {
3180 enum tree_code code;
3181
3182 if (!exp)
3183 return 0;
3184
3185 code = TREE_CODE (exp);
3186 if (code == PLACEHOLDER_EXPR)
3187 return 1;
3188
3189 switch (TREE_CODE_CLASS (code))
3190 {
3191 case tcc_reference:
3192 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3193 position computations since they will be converted into a
3194 WITH_RECORD_EXPR involving the reference, which we assume
3195 here will be valid. */
3196 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3197
3198 case tcc_exceptional:
3199 if (code == TREE_LIST)
3200 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3201 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3202 break;
3203
3204 case tcc_unary:
3205 case tcc_binary:
3206 case tcc_comparison:
3207 case tcc_expression:
3208 switch (code)
3209 {
3210 case COMPOUND_EXPR:
3211 /* Ignoring the first operand isn't quite right, but works best. */
3212 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3213
3214 case COND_EXPR:
3215 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3216 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3217 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3218
3219 case SAVE_EXPR:
3220 /* The save_expr function never wraps anything containing
3221 a PLACEHOLDER_EXPR. */
3222 return 0;
3223
3224 default:
3225 break;
3226 }
3227
3228 switch (TREE_CODE_LENGTH (code))
3229 {
3230 case 1:
3231 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3232 case 2:
3233 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3234 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3235 default:
3236 return 0;
3237 }
3238
3239 case tcc_vl_exp:
3240 switch (code)
3241 {
3242 case CALL_EXPR:
3243 {
3244 const_tree arg;
3245 const_call_expr_arg_iterator iter;
3246 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3247 if (CONTAINS_PLACEHOLDER_P (arg))
3248 return 1;
3249 return 0;
3250 }
3251 default:
3252 return 0;
3253 }
3254
3255 default:
3256 return 0;
3257 }
3258 return 0;
3259 }
3260
3261 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3262 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3263 field positions. */
3264
3265 static bool
3266 type_contains_placeholder_1 (const_tree type)
3267 {
3268 /* If the size contains a placeholder or the parent type (component type in
3269 the case of arrays) type involves a placeholder, this type does. */
3270 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3271 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3272 || (!POINTER_TYPE_P (type)
3273 && TREE_TYPE (type)
3274 && type_contains_placeholder_p (TREE_TYPE (type))))
3275 return true;
3276
3277 /* Now do type-specific checks. Note that the last part of the check above
3278 greatly limits what we have to do below. */
3279 switch (TREE_CODE (type))
3280 {
3281 case VOID_TYPE:
3282 case COMPLEX_TYPE:
3283 case ENUMERAL_TYPE:
3284 case BOOLEAN_TYPE:
3285 case POINTER_TYPE:
3286 case OFFSET_TYPE:
3287 case REFERENCE_TYPE:
3288 case METHOD_TYPE:
3289 case FUNCTION_TYPE:
3290 case VECTOR_TYPE:
3291 case NULLPTR_TYPE:
3292 return false;
3293
3294 case INTEGER_TYPE:
3295 case REAL_TYPE:
3296 case FIXED_POINT_TYPE:
3297 /* Here we just check the bounds. */
3298 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3299 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3300
3301 case ARRAY_TYPE:
3302 /* We have already checked the component type above, so just check the
3303 domain type. */
3304 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3305
3306 case RECORD_TYPE:
3307 case UNION_TYPE:
3308 case QUAL_UNION_TYPE:
3309 {
3310 tree field;
3311
3312 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3313 if (TREE_CODE (field) == FIELD_DECL
3314 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3315 || (TREE_CODE (type) == QUAL_UNION_TYPE
3316 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3317 || type_contains_placeholder_p (TREE_TYPE (field))))
3318 return true;
3319
3320 return false;
3321 }
3322
3323 default:
3324 gcc_unreachable ();
3325 }
3326 }
3327
3328 /* Wrapper around above function used to cache its result. */
3329
3330 bool
3331 type_contains_placeholder_p (tree type)
3332 {
3333 bool result;
3334
3335 /* If the contains_placeholder_bits field has been initialized,
3336 then we know the answer. */
3337 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3338 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3339
3340 /* Indicate that we've seen this type node, and the answer is false.
3341 This is what we want to return if we run into recursion via fields. */
3342 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3343
3344 /* Compute the real value. */
3345 result = type_contains_placeholder_1 (type);
3346
3347 /* Store the real value. */
3348 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3349
3350 return result;
3351 }
3352 \f
3353 /* Push tree EXP onto vector QUEUE if it is not already present. */
3354
3355 static void
3356 push_without_duplicates (tree exp, vec<tree> *queue)
3357 {
3358 unsigned int i;
3359 tree iter;
3360
3361 FOR_EACH_VEC_ELT (*queue, i, iter)
3362 if (simple_cst_equal (iter, exp) == 1)
3363 break;
3364
3365 if (!iter)
3366 queue->safe_push (exp);
3367 }
3368
3369 /* Given a tree EXP, find all occurrences of references to fields
3370 in a PLACEHOLDER_EXPR and place them in vector REFS without
3371 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3372 we assume here that EXP contains only arithmetic expressions
3373 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3374 argument list. */
3375
3376 void
3377 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3378 {
3379 enum tree_code code = TREE_CODE (exp);
3380 tree inner;
3381 int i;
3382
3383 /* We handle TREE_LIST and COMPONENT_REF separately. */
3384 if (code == TREE_LIST)
3385 {
3386 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3387 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3388 }
3389 else if (code == COMPONENT_REF)
3390 {
3391 for (inner = TREE_OPERAND (exp, 0);
3392 REFERENCE_CLASS_P (inner);
3393 inner = TREE_OPERAND (inner, 0))
3394 ;
3395
3396 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3397 push_without_duplicates (exp, refs);
3398 else
3399 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3400 }
3401 else
3402 switch (TREE_CODE_CLASS (code))
3403 {
3404 case tcc_constant:
3405 break;
3406
3407 case tcc_declaration:
3408 /* Variables allocated to static storage can stay. */
3409 if (!TREE_STATIC (exp))
3410 push_without_duplicates (exp, refs);
3411 break;
3412
3413 case tcc_expression:
3414 /* This is the pattern built in ada/make_aligning_type. */
3415 if (code == ADDR_EXPR
3416 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3417 {
3418 push_without_duplicates (exp, refs);
3419 break;
3420 }
3421
3422 /* Fall through... */
3423
3424 case tcc_exceptional:
3425 case tcc_unary:
3426 case tcc_binary:
3427 case tcc_comparison:
3428 case tcc_reference:
3429 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3430 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3431 break;
3432
3433 case tcc_vl_exp:
3434 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3435 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3436 break;
3437
3438 default:
3439 gcc_unreachable ();
3440 }
3441 }
3442
3443 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3444 return a tree with all occurrences of references to F in a
3445 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3446 CONST_DECLs. Note that we assume here that EXP contains only
3447 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3448 occurring only in their argument list. */
3449
3450 tree
3451 substitute_in_expr (tree exp, tree f, tree r)
3452 {
3453 enum tree_code code = TREE_CODE (exp);
3454 tree op0, op1, op2, op3;
3455 tree new_tree;
3456
3457 /* We handle TREE_LIST and COMPONENT_REF separately. */
3458 if (code == TREE_LIST)
3459 {
3460 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3461 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3462 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3463 return exp;
3464
3465 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3466 }
3467 else if (code == COMPONENT_REF)
3468 {
3469 tree inner;
3470
3471 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3472 and it is the right field, replace it with R. */
3473 for (inner = TREE_OPERAND (exp, 0);
3474 REFERENCE_CLASS_P (inner);
3475 inner = TREE_OPERAND (inner, 0))
3476 ;
3477
3478 /* The field. */
3479 op1 = TREE_OPERAND (exp, 1);
3480
3481 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3482 return r;
3483
3484 /* If this expression hasn't been completed yet, leave it alone. */
3485 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3486 return exp;
3487
3488 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3489 if (op0 == TREE_OPERAND (exp, 0))
3490 return exp;
3491
3492 new_tree
3493 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3494 }
3495 else
3496 switch (TREE_CODE_CLASS (code))
3497 {
3498 case tcc_constant:
3499 return exp;
3500
3501 case tcc_declaration:
3502 if (exp == f)
3503 return r;
3504 else
3505 return exp;
3506
3507 case tcc_expression:
3508 if (exp == f)
3509 return r;
3510
3511 /* Fall through... */
3512
3513 case tcc_exceptional:
3514 case tcc_unary:
3515 case tcc_binary:
3516 case tcc_comparison:
3517 case tcc_reference:
3518 switch (TREE_CODE_LENGTH (code))
3519 {
3520 case 0:
3521 return exp;
3522
3523 case 1:
3524 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3525 if (op0 == TREE_OPERAND (exp, 0))
3526 return exp;
3527
3528 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3529 break;
3530
3531 case 2:
3532 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3533 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3534
3535 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3536 return exp;
3537
3538 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3539 break;
3540
3541 case 3:
3542 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3543 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3544 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3545
3546 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3547 && op2 == TREE_OPERAND (exp, 2))
3548 return exp;
3549
3550 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3551 break;
3552
3553 case 4:
3554 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3555 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3556 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3557 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3558
3559 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3560 && op2 == TREE_OPERAND (exp, 2)
3561 && op3 == TREE_OPERAND (exp, 3))
3562 return exp;
3563
3564 new_tree
3565 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3566 break;
3567
3568 default:
3569 gcc_unreachable ();
3570 }
3571 break;
3572
3573 case tcc_vl_exp:
3574 {
3575 int i;
3576
3577 new_tree = NULL_TREE;
3578
3579 /* If we are trying to replace F with a constant, inline back
3580 functions which do nothing else than computing a value from
3581 the arguments they are passed. This makes it possible to
3582 fold partially or entirely the replacement expression. */
3583 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3584 {
3585 tree t = maybe_inline_call_in_expr (exp);
3586 if (t)
3587 return SUBSTITUTE_IN_EXPR (t, f, r);
3588 }
3589
3590 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3591 {
3592 tree op = TREE_OPERAND (exp, i);
3593 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3594 if (new_op != op)
3595 {
3596 if (!new_tree)
3597 new_tree = copy_node (exp);
3598 TREE_OPERAND (new_tree, i) = new_op;
3599 }
3600 }
3601
3602 if (new_tree)
3603 {
3604 new_tree = fold (new_tree);
3605 if (TREE_CODE (new_tree) == CALL_EXPR)
3606 process_call_operands (new_tree);
3607 }
3608 else
3609 return exp;
3610 }
3611 break;
3612
3613 default:
3614 gcc_unreachable ();
3615 }
3616
3617 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3618
3619 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3620 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3621
3622 return new_tree;
3623 }
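
/* A minimal usage sketch, with hypothetical names: an Ada-like front end
   that built a self-referential size expression SIZE_EXPR referencing the
   field LEN_FIELD through a PLACEHOLDER_EXPR can resolve it, once a
   concrete value ACTUAL_LEN is known, with

     tree size = SUBSTITUTE_IN_EXPR (size_expr, len_field, actual_len);

   The recursion above likewise goes through the SUBSTITUTE_IN_EXPR wrapper
   so that trivial operands are skipped cheaply. */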
3624
3625 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3626 for it within OBJ, a tree that is an object or a chain of references. */
3627
3628 tree
3629 substitute_placeholder_in_expr (tree exp, tree obj)
3630 {
3631 enum tree_code code = TREE_CODE (exp);
3632 tree op0, op1, op2, op3;
3633 tree new_tree;
3634
3635 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3636 in the chain of OBJ. */
3637 if (code == PLACEHOLDER_EXPR)
3638 {
3639 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3640 tree elt;
3641
3642 for (elt = obj; elt != 0;
3643 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3644 || TREE_CODE (elt) == COND_EXPR)
3645 ? TREE_OPERAND (elt, 1)
3646 : (REFERENCE_CLASS_P (elt)
3647 || UNARY_CLASS_P (elt)
3648 || BINARY_CLASS_P (elt)
3649 || VL_EXP_CLASS_P (elt)
3650 || EXPRESSION_CLASS_P (elt))
3651 ? TREE_OPERAND (elt, 0) : 0))
3652 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3653 return elt;
3654
3655 for (elt = obj; elt != 0;
3656 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3657 || TREE_CODE (elt) == COND_EXPR)
3658 ? TREE_OPERAND (elt, 1)
3659 : (REFERENCE_CLASS_P (elt)
3660 || UNARY_CLASS_P (elt)
3661 || BINARY_CLASS_P (elt)
3662 || VL_EXP_CLASS_P (elt)
3663 || EXPRESSION_CLASS_P (elt))
3664 ? TREE_OPERAND (elt, 0) : 0))
3665 if (POINTER_TYPE_P (TREE_TYPE (elt))
3666 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3667 == need_type))
3668 return fold_build1 (INDIRECT_REF, need_type, elt);
3669
3670 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3671 survives until RTL generation, there will be an error. */
3672 return exp;
3673 }
3674
3675 /* TREE_LIST is special because we need to look at TREE_VALUE
3676 and TREE_CHAIN, not TREE_OPERANDS. */
3677 else if (code == TREE_LIST)
3678 {
3679 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3680 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3681 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3682 return exp;
3683
3684 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3685 }
3686 else
3687 switch (TREE_CODE_CLASS (code))
3688 {
3689 case tcc_constant:
3690 case tcc_declaration:
3691 return exp;
3692
3693 case tcc_exceptional:
3694 case tcc_unary:
3695 case tcc_binary:
3696 case tcc_comparison:
3697 case tcc_expression:
3698 case tcc_reference:
3699 case tcc_statement:
3700 switch (TREE_CODE_LENGTH (code))
3701 {
3702 case 0:
3703 return exp;
3704
3705 case 1:
3706 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3707 if (op0 == TREE_OPERAND (exp, 0))
3708 return exp;
3709
3710 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3711 break;
3712
3713 case 2:
3714 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3715 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3716
3717 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3718 return exp;
3719
3720 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3721 break;
3722
3723 case 3:
3724 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3725 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3726 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3727
3728 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3729 && op2 == TREE_OPERAND (exp, 2))
3730 return exp;
3731
3732 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3733 break;
3734
3735 case 4:
3736 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3737 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3738 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3739 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3740
3741 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3742 && op2 == TREE_OPERAND (exp, 2)
3743 && op3 == TREE_OPERAND (exp, 3))
3744 return exp;
3745
3746 new_tree
3747 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3748 break;
3749
3750 default:
3751 gcc_unreachable ();
3752 }
3753 break;
3754
3755 case tcc_vl_exp:
3756 {
3757 int i;
3758
3759 new_tree = NULL_TREE;
3760
3761 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3762 {
3763 tree op = TREE_OPERAND (exp, i);
3764 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3765 if (new_op != op)
3766 {
3767 if (!new_tree)
3768 new_tree = copy_node (exp);
3769 TREE_OPERAND (new_tree, i) = new_op;
3770 }
3771 }
3772
3773 if (new_tree)
3774 {
3775 new_tree = fold (new_tree);
3776 if (TREE_CODE (new_tree) == CALL_EXPR)
3777 process_call_operands (new_tree);
3778 }
3779 else
3780 return exp;
3781 }
3782 break;
3783
3784 default:
3785 gcc_unreachable ();
3786 }
3787
3788 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3789
3790 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3791 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3792
3793 return new_tree;
3794 }
3795 \f
3796 /* Stabilize a reference so that we can use it any number of times
3797 without causing its operands to be evaluated more than once.
3798 Returns the stabilized reference. This works by means of save_expr,
3799 so see the caveats in the comments about save_expr.
3800
3801 Also allows conversion expressions whose operands are references.
3802 Any other kind of expression is returned unchanged. */
3803
3804 tree
3805 stabilize_reference (tree ref)
3806 {
3807 tree result;
3808 enum tree_code code = TREE_CODE (ref);
3809
3810 switch (code)
3811 {
3812 case VAR_DECL:
3813 case PARM_DECL:
3814 case RESULT_DECL:
3815 /* No action is needed in this case. */
3816 return ref;
3817
3818 CASE_CONVERT:
3819 case FLOAT_EXPR:
3820 case FIX_TRUNC_EXPR:
3821 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
3822 break;
3823
3824 case INDIRECT_REF:
3825 result = build_nt (INDIRECT_REF,
3826 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
3827 break;
3828
3829 case COMPONENT_REF:
3830 result = build_nt (COMPONENT_REF,
3831 stabilize_reference (TREE_OPERAND (ref, 0)),
3832 TREE_OPERAND (ref, 1), NULL_TREE);
3833 break;
3834
3835 case BIT_FIELD_REF:
3836 result = build_nt (BIT_FIELD_REF,
3837 stabilize_reference (TREE_OPERAND (ref, 0)),
3838 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3839 break;
3840
3841 case ARRAY_REF:
3842 result = build_nt (ARRAY_REF,
3843 stabilize_reference (TREE_OPERAND (ref, 0)),
3844 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3845 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3846 break;
3847
3848 case ARRAY_RANGE_REF:
3849 result = build_nt (ARRAY_RANGE_REF,
3850 stabilize_reference (TREE_OPERAND (ref, 0)),
3851 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3852 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3853 break;
3854
3855 case COMPOUND_EXPR:
3856 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3857 it wouldn't be ignored. This matters when dealing with
3858 volatiles. */
3859 return stabilize_reference_1 (ref);
3860
3861 /* If arg isn't a kind of lvalue we recognize, make no change.
3862 Caller should recognize the error for an invalid lvalue. */
3863 default:
3864 return ref;
3865
3866 case ERROR_MARK:
3867 return error_mark_node;
3868 }
3869
3870 TREE_TYPE (result) = TREE_TYPE (ref);
3871 TREE_READONLY (result) = TREE_READONLY (ref);
3872 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
3873 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3874
3875 return result;
3876 }
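
/* A minimal usage sketch: when a front end expands a compound assignment
   such as "a[f ()] += x", the left-hand side is stabilized once so that
   reading and then writing it does not call f twice (LHS and RHS here are
   assumed to be valid trees).

     lhs = stabilize_reference (lhs);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, rhs);
     tree assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   Both uses of LHS now share the SAVE_EXPRs created for its index. */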
3877
3878 /* Subroutine of stabilize_reference; this is called for subtrees of
3879 references. Any expression with side-effects must be put in a SAVE_EXPR
3880 to ensure that it is only evaluated once.
3881
3882 We don't put SAVE_EXPR nodes around everything, because assigning very
3883 simple expressions to temporaries causes us to miss good opportunities
3884 for optimizations. Among other things, the opportunity to fold in the
3885 addition of a constant into an addressing mode often gets lost, e.g.
3886 "y[i+1] += x;". In general, we take the approach that we should not make
3887 an assignment unless we are forced into it - i.e., that any non-side effect
3888 operator should be allowed, and that cse should take care of coalescing
3889 multiple utterances of the same expression should that prove fruitful. */
3890
3891 tree
3892 stabilize_reference_1 (tree e)
3893 {
3894 tree result;
3895 enum tree_code code = TREE_CODE (e);
3896
3897 /* We cannot ignore const expressions because one might be a reference
3898 to a const array whose index contains side-effects. But we can
3899 ignore things that are actually constant or that have already been
3900 handled by this function. */
3901
3902 if (tree_invariant_p (e))
3903 return e;
3904
3905 switch (TREE_CODE_CLASS (code))
3906 {
3907 case tcc_exceptional:
3908 case tcc_type:
3909 case tcc_declaration:
3910 case tcc_comparison:
3911 case tcc_statement:
3912 case tcc_expression:
3913 case tcc_reference:
3914 case tcc_vl_exp:
3915 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3916 so that it will only be evaluated once. */
3917 /* The reference (r) and comparison (<) classes could be handled as
3918 below, but it is generally faster to only evaluate them once. */
3919 if (TREE_SIDE_EFFECTS (e))
3920 return save_expr (e);
3921 return e;
3922
3923 case tcc_constant:
3924 /* Constants need no processing. In fact, we should never reach
3925 here. */
3926 return e;
3927
3928 case tcc_binary:
3929 /* Division is slow and tends to be compiled with jumps,
3930 especially the division by powers of 2 that is often
3931 found inside of an array reference. So do it just once. */
3932 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3933 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3934 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3935 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3936 return save_expr (e);
3937 /* Recursively stabilize each operand. */
3938 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3939 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3940 break;
3941
3942 case tcc_unary:
3943 /* Recursively stabilize each operand. */
3944 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3945 break;
3946
3947 default:
3948 gcc_unreachable ();
3949 }
3950
3951 TREE_TYPE (result) = TREE_TYPE (e);
3952 TREE_READONLY (result) = TREE_READONLY (e);
3953 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3954 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3955
3956 return result;
3957 }
3958 \f
3959 /* Low-level constructors for expressions. */
3960
3961 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
3962 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
3963
3964 void
3965 recompute_tree_invariant_for_addr_expr (tree t)
3966 {
3967 tree node;
3968 bool tc = true, se = false;
3969
3970 /* We started out assuming this address is both invariant and constant
3971 and has no side effects. Now go down any handled components and see if
3972 any of them involve offsets that are either non-constant or non-invariant.
3973 Also check for side-effects.
3974
3975 ??? Note that this code makes no attempt to deal with the case where
3976 taking the address of something causes a copy due to misalignment. */
3977
3978 #define UPDATE_FLAGS(NODE) \
3979 do { tree _node = (NODE); \
3980 if (_node && !TREE_CONSTANT (_node)) tc = false; \
3981 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
3982
3983 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
3984 node = TREE_OPERAND (node, 0))
3985 {
3986 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
3987 array reference (probably made temporarily by the G++ front end),
3988 so ignore all the operands. */
3989 if ((TREE_CODE (node) == ARRAY_REF
3990 || TREE_CODE (node) == ARRAY_RANGE_REF)
3991 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
3992 {
3993 UPDATE_FLAGS (TREE_OPERAND (node, 1));
3994 if (TREE_OPERAND (node, 2))
3995 UPDATE_FLAGS (TREE_OPERAND (node, 2));
3996 if (TREE_OPERAND (node, 3))
3997 UPDATE_FLAGS (TREE_OPERAND (node, 3));
3998 }
3999 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4000 FIELD_DECL, apparently. The G++ front end can put something else
4001 there, at least temporarily. */
4002 else if (TREE_CODE (node) == COMPONENT_REF
4003 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4004 {
4005 if (TREE_OPERAND (node, 2))
4006 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4007 }
4008 }
4009
4010 node = lang_hooks.expr_to_decl (node, &tc, &se);
4011
4012 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4013 the address, since &(*a)->b is a form of addition. If it's a constant, the
4014 address is constant too. If it's a decl, its address is constant if the
4015 decl is static. Everything else is not constant and, furthermore,
4016 taking the address of a volatile variable is not volatile. */
4017 if (TREE_CODE (node) == INDIRECT_REF
4018 || TREE_CODE (node) == MEM_REF)
4019 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4020 else if (CONSTANT_CLASS_P (node))
4021 ;
4022 else if (DECL_P (node))
4023 tc &= (staticp (node) != NULL_TREE);
4024 else
4025 {
4026 tc = false;
4027 se |= TREE_SIDE_EFFECTS (node);
4028 }
4029
4030
4031 TREE_CONSTANT (t) = tc;
4032 TREE_SIDE_EFFECTS (t) = se;
4033 #undef UPDATE_FLAGS
4034 }
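
/* Illustrative sketch (editorial addition, not part of the original file):
   build1 already calls this function for freshly built ADDR_EXPRs (see the
   switch below), so the typical reason to call it directly is that the
   operand of an existing ADDR_EXPR was patched in place, e.g.

       TREE_OPERAND (addr, 0) = new_base;
       recompute_tree_invariant_for_addr_expr (addr);

   after which TREE_CONSTANT and TREE_SIDE_EFFECTS on ADDR again reflect
   its operand.  `addr' and `new_base' are placeholder names.  */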
4035
4036 /* Build an expression of code CODE, data type TYPE, and operands as
4037 specified. Expressions and reference nodes can be created this way.
4038 Constants, decls, types and misc nodes cannot be.
4039
4040 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4041 enough for all extant tree codes. */
4042
4043 tree
4044 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4045 {
4046 tree t;
4047
4048 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4049
4050 t = make_node_stat (code PASS_MEM_STAT);
4051 TREE_TYPE (t) = tt;
4052
4053 return t;
4054 }
4055
4056 tree
4057 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4058 {
4059 int length = sizeof (struct tree_exp);
4060 tree t;
4061
4062 record_node_allocation_statistics (code, length);
4063
4064 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4065
4066 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4067
4068 memset (t, 0, sizeof (struct tree_common));
4069
4070 TREE_SET_CODE (t, code);
4071
4072 TREE_TYPE (t) = type;
4073 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4074 TREE_OPERAND (t, 0) = node;
4075 if (node && !TYPE_P (node))
4076 {
4077 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4078 TREE_READONLY (t) = TREE_READONLY (node);
4079 }
4080
4081 if (TREE_CODE_CLASS (code) == tcc_statement)
4082 TREE_SIDE_EFFECTS (t) = 1;
4083 else switch (code)
4084 {
4085 case VA_ARG_EXPR:
4086 /* All of these have side-effects, no matter what their
4087 operands are. */
4088 TREE_SIDE_EFFECTS (t) = 1;
4089 TREE_READONLY (t) = 0;
4090 break;
4091
4092 case INDIRECT_REF:
4093 /* Whether a dereference is readonly has nothing to do with whether
4094 its operand is readonly. */
4095 TREE_READONLY (t) = 0;
4096 break;
4097
4098 case ADDR_EXPR:
4099 if (node)
4100 recompute_tree_invariant_for_addr_expr (t);
4101 break;
4102
4103 default:
4104 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4105 && node && !TYPE_P (node)
4106 && TREE_CONSTANT (node))
4107 TREE_CONSTANT (t) = 1;
4108 if (TREE_CODE_CLASS (code) == tcc_reference
4109 && node && TREE_THIS_VOLATILE (node))
4110 TREE_THIS_VOLATILE (t) = 1;
4111 break;
4112 }
4113
4114 return t;
4115 }
4116
4117 #define PROCESS_ARG(N) \
4118 do { \
4119 TREE_OPERAND (t, N) = arg##N; \
4120 if (arg##N && !TYPE_P (arg##N)) \
4121 { \
4122 if (TREE_SIDE_EFFECTS (arg##N)) \
4123 side_effects = 1; \
4124 if (!TREE_READONLY (arg##N) \
4125 && !CONSTANT_CLASS_P (arg##N)) \
4126 (void) (read_only = 0); \
4127 if (!TREE_CONSTANT (arg##N)) \
4128 (void) (constant = 0); \
4129 } \
4130 } while (0)
4131
4132 tree
4133 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4134 {
4135 bool constant, read_only, side_effects;
4136 tree t;
4137
4138 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4139
4140 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4141 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4142 /* When sizetype precision doesn't match that of pointers
4143 we need to be able to build explicit extensions or truncations
4144 of the offset argument. */
4145 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4146 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4147 && TREE_CODE (arg1) == INTEGER_CST);
4148
4149 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4150 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4151 && ptrofftype_p (TREE_TYPE (arg1)));
4152
4153 t = make_node_stat (code PASS_MEM_STAT);
4154 TREE_TYPE (t) = tt;
4155
4156 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4157 result based on those same flags for the arguments. But if the
4158 arguments aren't really even `tree' expressions, we shouldn't be trying
4159 to do this. */
4160
4161 /* Expressions without side effects may be constant if their
4162 arguments are as well. */
4163 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4164 || TREE_CODE_CLASS (code) == tcc_binary);
4165 read_only = 1;
4166 side_effects = TREE_SIDE_EFFECTS (t);
4167
4168 PROCESS_ARG (0);
4169 PROCESS_ARG (1);
4170
4171 TREE_READONLY (t) = read_only;
4172 TREE_CONSTANT (t) = constant;
4173 TREE_SIDE_EFFECTS (t) = side_effects;
4174 TREE_THIS_VOLATILE (t)
4175 = (TREE_CODE_CLASS (code) == tcc_reference
4176 && arg0 && TREE_THIS_VOLATILE (arg0));
4177
4178 return t;
4179 }
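
/* Illustrative sketch (editorial addition, not from the original sources):
   callers normally go through the build2 macro, which expands to
   build2_stat with the memory-statistics arguments.  For example, adding
   two INTEGER_CSTs yields a node whose flags are derived from the operands:

       tree four = build_int_cst (integer_type_node, 4);
       tree sum  = build2 (PLUS_EXPR, integer_type_node, four, four);

   Here PROCESS_ARG leaves `constant' and `read_only' set and `side_effects'
   clear, so TREE_CONSTANT (sum) is 1 and TREE_SIDE_EFFECTS (sum) is 0.  */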
4180
4181
4182 tree
4183 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4184 tree arg2 MEM_STAT_DECL)
4185 {
4186 bool constant, read_only, side_effects;
4187 tree t;
4188
4189 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4190 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4191
4192 t = make_node_stat (code PASS_MEM_STAT);
4193 TREE_TYPE (t) = tt;
4194
4195 read_only = 1;
4196
4197 /* As a special exception, if COND_EXPR has NULL branches, we
4198 assume that it is a gimple statement and always consider
4199 it to have side effects. */
4200 if (code == COND_EXPR
4201 && tt == void_type_node
4202 && arg1 == NULL_TREE
4203 && arg2 == NULL_TREE)
4204 side_effects = true;
4205 else
4206 side_effects = TREE_SIDE_EFFECTS (t);
4207
4208 PROCESS_ARG (0);
4209 PROCESS_ARG (1);
4210 PROCESS_ARG (2);
4211
4212 if (code == COND_EXPR)
4213 TREE_READONLY (t) = read_only;
4214
4215 TREE_SIDE_EFFECTS (t) = side_effects;
4216 TREE_THIS_VOLATILE (t)
4217 = (TREE_CODE_CLASS (code) == tcc_reference
4218 && arg0 && TREE_THIS_VOLATILE (arg0));
4219
4220 return t;
4221 }
4222
4223 tree
4224 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4225 tree arg2, tree arg3 MEM_STAT_DECL)
4226 {
4227 bool constant, read_only, side_effects;
4228 tree t;
4229
4230 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4231
4232 t = make_node_stat (code PASS_MEM_STAT);
4233 TREE_TYPE (t) = tt;
4234
4235 side_effects = TREE_SIDE_EFFECTS (t);
4236
4237 PROCESS_ARG (0);
4238 PROCESS_ARG (1);
4239 PROCESS_ARG (2);
4240 PROCESS_ARG (3);
4241
4242 TREE_SIDE_EFFECTS (t) = side_effects;
4243 TREE_THIS_VOLATILE (t)
4244 = (TREE_CODE_CLASS (code) == tcc_reference
4245 && arg0 && TREE_THIS_VOLATILE (arg0));
4246
4247 return t;
4248 }
4249
4250 tree
4251 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4252 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4253 {
4254 bool constant, read_only, side_effects;
4255 tree t;
4256
4257 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4258
4259 t = make_node_stat (code PASS_MEM_STAT);
4260 TREE_TYPE (t) = tt;
4261
4262 side_effects = TREE_SIDE_EFFECTS (t);
4263
4264 PROCESS_ARG (0);
4265 PROCESS_ARG (1);
4266 PROCESS_ARG (2);
4267 PROCESS_ARG (3);
4268 PROCESS_ARG (4);
4269
4270 TREE_SIDE_EFFECTS (t) = side_effects;
4271 TREE_THIS_VOLATILE (t)
4272 = (TREE_CODE_CLASS (code) == tcc_reference
4273 && arg0 && TREE_THIS_VOLATILE (arg0));
4274
4275 return t;
4276 }
4277
4278 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4279 on the pointer PTR. */
4280
4281 tree
4282 build_simple_mem_ref_loc (location_t loc, tree ptr)
4283 {
4284 HOST_WIDE_INT offset = 0;
4285 tree ptype = TREE_TYPE (ptr);
4286 tree tem;
4287 /* For convenience allow addresses that collapse to a simple base
4288 and offset. */
4289 if (TREE_CODE (ptr) == ADDR_EXPR
4290 && (handled_component_p (TREE_OPERAND (ptr, 0))
4291 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4292 {
4293 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4294 gcc_assert (ptr);
4295 ptr = build_fold_addr_expr (ptr);
4296 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4297 }
4298 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4299 ptr, build_int_cst (ptype, offset));
4300 SET_EXPR_LOCATION (tem, loc);
4301 return tem;
4302 }
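
/* Illustrative sketch (editorial addition): for a pointer SSA name or
   DECL `ptr', the call

       tree deref = build_simple_mem_ref_loc (loc, ptr);

   produces the equivalent of *ptr, i.e. roughly

       build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)), ptr,
               build_int_cst (TREE_TYPE (ptr), 0));

   with LOC attached.  `ptr', `loc' and `deref' are placeholder names.  */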
4303
4304 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4305
4306 offset_int
4307 mem_ref_offset (const_tree t)
4308 {
4309 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4310 }
4311
4312 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4313 offsetted by OFFSET units. */
4314
4315 tree
4316 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4317 {
4318 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4319 build_fold_addr_expr (base),
4320 build_int_cst (ptr_type_node, offset));
4321 tree addr = build1 (ADDR_EXPR, type, ref);
4322 recompute_tree_invariant_for_addr_expr (addr);
4323 return addr;
4324 }
4325
4326 /* Similar except don't specify the TREE_TYPE
4327 and leave the TREE_SIDE_EFFECTS as 0.
4328 It is permissible for arguments to be null,
4329 or even garbage if their values do not matter. */
4330
4331 tree
4332 build_nt (enum tree_code code, ...)
4333 {
4334 tree t;
4335 int length;
4336 int i;
4337 va_list p;
4338
4339 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4340
4341 va_start (p, code);
4342
4343 t = make_node (code);
4344 length = TREE_CODE_LENGTH (code);
4345
4346 for (i = 0; i < length; i++)
4347 TREE_OPERAND (t, i) = va_arg (p, tree);
4348
4349 va_end (p);
4350 return t;
4351 }
4352
4353 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4354 tree vec. */
4355
4356 tree
4357 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4358 {
4359 tree ret, t;
4360 unsigned int ix;
4361
4362 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4363 CALL_EXPR_FN (ret) = fn;
4364 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4365 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4366 CALL_EXPR_ARG (ret, ix) = t;
4367 return ret;
4368 }
4369 \f
4370 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4371 We do NOT enter this node in any sort of symbol table.
4372
4373 LOC is the location of the decl.
4374
4375 layout_decl is used to set up the decl's storage layout.
4376 Other slots are initialized to 0 or null pointers. */
4377
4378 tree
4379 build_decl_stat (location_t loc, enum tree_code code, tree name,
4380 tree type MEM_STAT_DECL)
4381 {
4382 tree t;
4383
4384 t = make_node_stat (code PASS_MEM_STAT);
4385 DECL_SOURCE_LOCATION (t) = loc;
4386
4387 /* if (type == error_mark_node)
4388 type = integer_type_node; */
4389 /* That is not done, deliberately, so that having error_mark_node
4390 as the type can suppress useless errors in the use of this variable. */
4391
4392 DECL_NAME (t) = name;
4393 TREE_TYPE (t) = type;
4394
4395 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4396 layout_decl (t, 0);
4397
4398 return t;
4399 }
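
/* Illustrative sketch (editorial addition): callers use the build_decl
   macro, which wraps build_decl_stat.  For instance, a middle-end
   temporary variable might be created as

       tree tmp = build_decl (input_location, VAR_DECL,
                              get_identifier ("tmp"), integer_type_node);

   Because the code is VAR_DECL, layout_decl has already filled in
   DECL_SIZE, DECL_ALIGN and DECL_MODE for TMP.  The name "tmp" is a
   placeholder chosen only for the example.  */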
4400
4401 /* Builds and returns a function declaration with NAME and TYPE. */
4402
4403 tree
4404 build_fn_decl (const char *name, tree type)
4405 {
4406 tree id = get_identifier (name);
4407 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4408
4409 DECL_EXTERNAL (decl) = 1;
4410 TREE_PUBLIC (decl) = 1;
4411 DECL_ARTIFICIAL (decl) = 1;
4412 TREE_NOTHROW (decl) = 1;
4413
4414 return decl;
4415 }
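
/* Illustrative sketch (editorial addition): build_fn_decl is handy for
   declaring library or runtime entry points from the middle end, e.g.

       tree fntype = build_function_type_list (void_type_node, NULL_TREE);
       tree decl = build_fn_decl ("__example_runtime_hook", fntype);

   The result is external, public, artificial and nothrow, as set above.
   "__example_runtime_hook" is a made-up name used only for illustration.  */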
4416
4417 vec<tree, va_gc> *all_translation_units;
4418
4419 /* Builds a new translation-unit decl with name NAME, queues it in the
4420 global list of translation-unit decls and returns it. */
4421
4422 tree
4423 build_translation_unit_decl (tree name)
4424 {
4425 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4426 name, NULL_TREE);
4427 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4428 vec_safe_push (all_translation_units, tu);
4429 return tu;
4430 }
4431
4432 \f
4433 /* BLOCK nodes are used to represent the structure of binding contours
4434 and declarations, once those contours have been exited and their contents
4435 compiled. This information is used for outputting debugging info. */
4436
4437 tree
4438 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4439 {
4440 tree block = make_node (BLOCK);
4441
4442 BLOCK_VARS (block) = vars;
4443 BLOCK_SUBBLOCKS (block) = subblocks;
4444 BLOCK_SUPERCONTEXT (block) = supercontext;
4445 BLOCK_CHAIN (block) = chain;
4446 return block;
4447 }
4448
4449 \f
4450 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4451
4452 LOC is the location to use in tree T. */
4453
4454 void
4455 protected_set_expr_location (tree t, location_t loc)
4456 {
4457 if (t && CAN_HAVE_LOCATION_P (t))
4458 SET_EXPR_LOCATION (t, loc);
4459 }
4460 \f
4461 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4462 is ATTRIBUTE. */
4463
4464 tree
4465 build_decl_attribute_variant (tree ddecl, tree attribute)
4466 {
4467 DECL_ATTRIBUTES (ddecl) = attribute;
4468 return ddecl;
4469 }
4470
4471 /* Borrowed from hashtab.c iterative_hash implementation. */
4472 #define mix(a,b,c) \
4473 { \
4474 a -= b; a -= c; a ^= (c>>13); \
4475 b -= c; b -= a; b ^= (a<< 8); \
4476 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4477 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4478 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4479 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4480 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4481 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4482 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4483 }
4484
4485
4486 /* Produce a good hash value combining VAL and VAL2. */
4487 hashval_t
4488 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4489 {
4490 /* the golden ratio; an arbitrary value. */
4491 hashval_t a = 0x9e3779b9;
4492
4493 mix (a, val, val2);
4494 return val2;
4495 }
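
/* Illustrative sketch (editorial addition): hash values are typically
   chained through repeated calls, the previous result feeding the next
   one, e.g.

       hashval_t hstate = 0;
       hstate = iterative_hash_hashval_t (TREE_CODE (t), hstate);
       hstate = iterative_hash_hashval_t (TYPE_HASH (TREE_TYPE (t)), hstate);

   a pattern similar to the one used by build_type_attribute_qual_variant
   below.  `t' is a placeholder for some tree node with a type.  */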
4496
4497 /* Produce a good hash value combining VAL and VAL2. */
4498 hashval_t
4499 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4500 {
4501 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4502 return iterative_hash_hashval_t (val, val2);
4503 else
4504 {
4505 hashval_t a = (hashval_t) val;
4506 /* Avoid warnings about shifting of more than the width of the type on
4507 hosts that won't execute this path. */
4508 int zero = 0;
4509 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4510 mix (a, b, val2);
4511 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4512 {
4513 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4514 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4515 mix (a, b, val2);
4516 }
4517 return val2;
4518 }
4519 }
4520
4521 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4522 is ATTRIBUTE and its qualifiers are QUALS.
4523
4524 Record such modified types already made so we don't make duplicates. */
4525
4526 tree
4527 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4528 {
4529 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4530 {
4531 hashval_t hashcode = 0;
4532 tree ntype;
4533 int i;
4534 tree t;
4535 enum tree_code code = TREE_CODE (ttype);
4536
4537 /* Building a distinct copy of a tagged type is inappropriate; it
4538 causes breakage in code that expects there to be a one-to-one
4539 relationship between a struct and its fields.
4540 build_duplicate_type is another solution (as used in
4541 handle_transparent_union_attribute), but that doesn't play well
4542 with the stronger C++ type identity model. */
4543 if (TREE_CODE (ttype) == RECORD_TYPE
4544 || TREE_CODE (ttype) == UNION_TYPE
4545 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4546 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4547 {
4548 warning (OPT_Wattributes,
4549 "ignoring attributes applied to %qT after definition",
4550 TYPE_MAIN_VARIANT (ttype));
4551 return build_qualified_type (ttype, quals);
4552 }
4553
4554 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4555 ntype = build_distinct_type_copy (ttype);
4556
4557 TYPE_ATTRIBUTES (ntype) = attribute;
4558
4559 hashcode = iterative_hash_object (code, hashcode);
4560 if (TREE_TYPE (ntype))
4561 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4562 hashcode);
4563 hashcode = attribute_hash_list (attribute, hashcode);
4564
4565 switch (TREE_CODE (ntype))
4566 {
4567 case FUNCTION_TYPE:
4568 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4569 break;
4570 case ARRAY_TYPE:
4571 if (TYPE_DOMAIN (ntype))
4572 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4573 hashcode);
4574 break;
4575 case INTEGER_TYPE:
4576 t = TYPE_MAX_VALUE (ntype);
4577 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4578 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4579 break;
4580 case REAL_TYPE:
4581 case FIXED_POINT_TYPE:
4582 {
4583 unsigned int precision = TYPE_PRECISION (ntype);
4584 hashcode = iterative_hash_object (precision, hashcode);
4585 }
4586 break;
4587 default:
4588 break;
4589 }
4590
4591 ntype = type_hash_canon (hashcode, ntype);
4592
4593 /* If the target-dependent attributes make NTYPE different from
4594 its canonical type, we will need to use structural equality
4595 checks for this type. */
4596 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4597 || !comp_type_attributes (ntype, ttype))
4598 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4599 else if (TYPE_CANONICAL (ntype) == ntype)
4600 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4601
4602 ttype = build_qualified_type (ntype, quals);
4603 }
4604 else if (TYPE_QUALS (ttype) != quals)
4605 ttype = build_qualified_type (ttype, quals);
4606
4607 return ttype;
4608 }
4609
4610 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4611 the same. */
4612
4613 static bool
4614 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4615 {
4616 tree cl1, cl2;
4617 for (cl1 = clauses1, cl2 = clauses2;
4618 cl1 && cl2;
4619 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4620 {
4621 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4622 return false;
4623 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4624 {
4625 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4626 OMP_CLAUSE_DECL (cl2)) != 1)
4627 return false;
4628 }
4629 switch (OMP_CLAUSE_CODE (cl1))
4630 {
4631 case OMP_CLAUSE_ALIGNED:
4632 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4633 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4634 return false;
4635 break;
4636 case OMP_CLAUSE_LINEAR:
4637 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4638 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4639 return false;
4640 break;
4641 case OMP_CLAUSE_SIMDLEN:
4642 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4643 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4644 return false;
4645 default:
4646 break;
4647 }
4648 }
4649 return true;
4650 }
4651
4652 /* Remove duplicate "omp declare simd" attributes. */
4653
4654 void
4655 omp_remove_redundant_declare_simd_attrs (tree fndecl)
4656 {
4657 tree attr, end_attr = NULL_TREE, last_attr = NULL_TREE;
4658 for (attr = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (fndecl));
4659 attr;
4660 attr = lookup_attribute ("omp declare simd", TREE_CHAIN (attr)))
4661 {
4662 tree *pc;
4663 for (pc = &TREE_CHAIN (attr); *pc && *pc != end_attr; )
4664 {
4665 if (is_attribute_p ("omp declare simd", TREE_PURPOSE (*pc)))
4666 {
4667 last_attr = TREE_CHAIN (*pc);
4668 if (TREE_VALUE (attr) == NULL_TREE)
4669 {
4670 if (TREE_VALUE (*pc) == NULL_TREE)
4671 {
4672 *pc = TREE_CHAIN (*pc);
4673 continue;
4674 }
4675 }
4676 else if (TREE_VALUE (*pc) != NULL_TREE
4677 && omp_declare_simd_clauses_equal
4678 (TREE_VALUE (TREE_VALUE (*pc)),
4679 TREE_VALUE (TREE_VALUE (attr))))
4680 {
4681 *pc = TREE_CHAIN (*pc);
4682 continue;
4683 }
4684 }
4685 pc = &TREE_CHAIN (*pc);
4686 }
4687 end_attr = last_attr;
4688 }
4689 }
4690
4691 /* Compare two attributes for their value identity. Return true if the
4692 attribute values are known to be equal; otherwise return false.
4693 */
4694
4695 static bool
4696 attribute_value_equal (const_tree attr1, const_tree attr2)
4697 {
4698 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4699 return true;
4700
4701 if (TREE_VALUE (attr1) != NULL_TREE
4702 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4703 && TREE_VALUE (attr2) != NULL
4704 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4705 return (simple_cst_list_equal (TREE_VALUE (attr1),
4706 TREE_VALUE (attr2)) == 1);
4707
4708 if (flag_openmp
4709 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4710 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4711 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4712 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4713 TREE_VALUE (attr2));
4714
4715 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4716 }
4717
4718 /* Return 0 if the attributes for two types are incompatible, 1 if they
4719 are compatible, and 2 if they are nearly compatible (which causes a
4720 warning to be generated). */
4721 int
4722 comp_type_attributes (const_tree type1, const_tree type2)
4723 {
4724 const_tree a1 = TYPE_ATTRIBUTES (type1);
4725 const_tree a2 = TYPE_ATTRIBUTES (type2);
4726 const_tree a;
4727
4728 if (a1 == a2)
4729 return 1;
4730 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4731 {
4732 const struct attribute_spec *as;
4733 const_tree attr;
4734
4735 as = lookup_attribute_spec (get_attribute_name (a));
4736 if (!as || as->affects_type_identity == false)
4737 continue;
4738
4739 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4740 if (!attr || !attribute_value_equal (a, attr))
4741 break;
4742 }
4743 if (!a)
4744 {
4745 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4746 {
4747 const struct attribute_spec *as;
4748
4749 as = lookup_attribute_spec (get_attribute_name (a));
4750 if (!as || as->affects_type_identity == false)
4751 continue;
4752
4753 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4754 break;
4755 /* We don't need to compare trees again, as we did this
4756 already in the first loop. */
4757 }
4758 /* All attributes that affect type identity are equal, so
4759 there is no need to call the target hook for comparison. */
4760 if (!a)
4761 return 1;
4762 }
4763 /* As some type combinations (like the default calling convention) might
4764 be compatible, we have to call the target hook to get the final result. */
4765 return targetm.comp_type_attributes (type1, type2);
4766 }
4767
4768 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4769 is ATTRIBUTE.
4770
4771 Record such modified types already made so we don't make duplicates. */
4772
4773 tree
4774 build_type_attribute_variant (tree ttype, tree attribute)
4775 {
4776 return build_type_attribute_qual_variant (ttype, attribute,
4777 TYPE_QUALS (ttype));
4778 }
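
/* Illustrative sketch (editorial addition): the ATTRIBUTE argument is a
   TREE_LIST whose TREE_PURPOSE is the attribute identifier and whose
   TREE_VALUE holds its arguments, normally produced by the attribute
   machinery.  Built by hand it would look like

       tree attr = tree_cons (get_identifier ("may_alias"),
                              NULL_TREE, TYPE_ATTRIBUTES (type));
       type = build_type_attribute_variant (type, attr);

   where `type' is a placeholder for some existing type node.  */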
4779
4780
4781 /* Reset the expression *EXPR_P, a size or position.
4782
4783 ??? We could reset all non-constant sizes or positions. But it's cheap
4784 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4785
4786 We need to reset self-referential sizes or positions because they cannot
4787 be gimplified and thus can contain a CALL_EXPR after the gimplification
4788 is finished, which will run afoul of LTO streaming. And they need to be
4789 reset to something essentially dummy but not constant, so as to preserve
4790 the properties of the object they are attached to. */
4791
4792 static inline void
4793 free_lang_data_in_one_sizepos (tree *expr_p)
4794 {
4795 tree expr = *expr_p;
4796 if (CONTAINS_PLACEHOLDER_P (expr))
4797 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4798 }
4799
4800
4801 /* Reset all the fields in a binfo node BINFO. We only keep
4802 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4803
4804 static void
4805 free_lang_data_in_binfo (tree binfo)
4806 {
4807 unsigned i;
4808 tree t;
4809
4810 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4811
4812 BINFO_VIRTUALS (binfo) = NULL_TREE;
4813 BINFO_BASE_ACCESSES (binfo) = NULL;
4814 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4815 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4816
4817 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4818 free_lang_data_in_binfo (t);
4819 }
4820
4821
4822 /* Reset all language specific information still present in TYPE. */
4823
4824 static void
4825 free_lang_data_in_type (tree type)
4826 {
4827 gcc_assert (TYPE_P (type));
4828
4829 /* Give the FE a chance to remove its own data first. */
4830 lang_hooks.free_lang_data (type);
4831
4832 TREE_LANG_FLAG_0 (type) = 0;
4833 TREE_LANG_FLAG_1 (type) = 0;
4834 TREE_LANG_FLAG_2 (type) = 0;
4835 TREE_LANG_FLAG_3 (type) = 0;
4836 TREE_LANG_FLAG_4 (type) = 0;
4837 TREE_LANG_FLAG_5 (type) = 0;
4838 TREE_LANG_FLAG_6 (type) = 0;
4839
4840 if (TREE_CODE (type) == FUNCTION_TYPE)
4841 {
4842 /* Remove the const and volatile qualifiers from arguments. The
4843 C++ front end removes them, but the C front end does not,
4844 leading to false ODR violation errors when merging two
4845 instances of the same function signature compiled by
4846 different front ends. */
4847 tree p;
4848
4849 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4850 {
4851 tree arg_type = TREE_VALUE (p);
4852
4853 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4854 {
4855 int quals = TYPE_QUALS (arg_type)
4856 & ~TYPE_QUAL_CONST
4857 & ~TYPE_QUAL_VOLATILE;
4858 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4859 free_lang_data_in_type (TREE_VALUE (p));
4860 }
4861 }
4862 }
4863
4864 /* Remove members that are not actually FIELD_DECLs from the field
4865 list of an aggregate. These occur in C++. */
4866 if (RECORD_OR_UNION_TYPE_P (type))
4867 {
4868 tree prev, member;
4869
4870 /* Note that TYPE_FIELDS can be shared across distinct
4871 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4872 to be removed, we cannot set its TREE_CHAIN to NULL.
4873 Otherwise, we would not be able to find all the other fields
4874 in the other instances of this TREE_TYPE.
4875
4876 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4877 prev = NULL_TREE;
4878 member = TYPE_FIELDS (type);
4879 while (member)
4880 {
4881 if (TREE_CODE (member) == FIELD_DECL
4882 || TREE_CODE (member) == TYPE_DECL)
4883 {
4884 if (prev)
4885 TREE_CHAIN (prev) = member;
4886 else
4887 TYPE_FIELDS (type) = member;
4888 prev = member;
4889 }
4890
4891 member = TREE_CHAIN (member);
4892 }
4893
4894 if (prev)
4895 TREE_CHAIN (prev) = NULL_TREE;
4896 else
4897 TYPE_FIELDS (type) = NULL_TREE;
4898
4899 TYPE_METHODS (type) = NULL_TREE;
4900 if (TYPE_BINFO (type))
4901 free_lang_data_in_binfo (TYPE_BINFO (type));
4902 }
4903 else
4904 {
4905 /* For non-aggregate types, clear out the language slot (which
4906 overloads TYPE_BINFO). */
4907 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4908
4909 if (INTEGRAL_TYPE_P (type)
4910 || SCALAR_FLOAT_TYPE_P (type)
4911 || FIXED_POINT_TYPE_P (type))
4912 {
4913 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4914 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4915 }
4916 }
4917
4918 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4919 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4920
4921 if (TYPE_CONTEXT (type)
4922 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4923 {
4924 tree ctx = TYPE_CONTEXT (type);
4925 do
4926 {
4927 ctx = BLOCK_SUPERCONTEXT (ctx);
4928 }
4929 while (ctx && TREE_CODE (ctx) == BLOCK);
4930 TYPE_CONTEXT (type) = ctx;
4931 }
4932 }
4933
4934
4935 /* Return true if DECL may need an assembler name to be set. */
4936
4937 static inline bool
4938 need_assembler_name_p (tree decl)
4939 {
4940 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4941 if (TREE_CODE (decl) != FUNCTION_DECL
4942 && TREE_CODE (decl) != VAR_DECL)
4943 return false;
4944
4945 /* If DECL already has its assembler name set, it does not need a
4946 new one. */
4947 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4948 || DECL_ASSEMBLER_NAME_SET_P (decl))
4949 return false;
4950
4951 /* Abstract decls do not need an assembler name. */
4952 if (DECL_ABSTRACT (decl))
4953 return false;
4954
4955 /* For VAR_DECLs, only static, public and external symbols need an
4956 assembler name. */
4957 if (TREE_CODE (decl) == VAR_DECL
4958 && !TREE_STATIC (decl)
4959 && !TREE_PUBLIC (decl)
4960 && !DECL_EXTERNAL (decl))
4961 return false;
4962
4963 if (TREE_CODE (decl) == FUNCTION_DECL)
4964 {
4965 /* Do not set assembler name on builtins. Allow RTL expansion to
4966 decide whether to expand inline or via a regular call. */
4967 if (DECL_BUILT_IN (decl)
4968 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
4969 return false;
4970
4971 /* Functions represented in the callgraph need an assembler name. */
4972 if (cgraph_get_node (decl) != NULL)
4973 return true;
4974
4975 /* Unused and not public functions don't need an assembler name. */
4976 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
4977 return false;
4978 }
4979
4980 return true;
4981 }
4982
4983
4984 /* Reset all language specific information still present in symbol
4985 DECL. */
4986
4987 static void
4988 free_lang_data_in_decl (tree decl)
4989 {
4990 gcc_assert (DECL_P (decl));
4991
4992 /* Give the FE a chance to remove its own data first. */
4993 lang_hooks.free_lang_data (decl);
4994
4995 TREE_LANG_FLAG_0 (decl) = 0;
4996 TREE_LANG_FLAG_1 (decl) = 0;
4997 TREE_LANG_FLAG_2 (decl) = 0;
4998 TREE_LANG_FLAG_3 (decl) = 0;
4999 TREE_LANG_FLAG_4 (decl) = 0;
5000 TREE_LANG_FLAG_5 (decl) = 0;
5001 TREE_LANG_FLAG_6 (decl) = 0;
5002
5003 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5004 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5005 if (TREE_CODE (decl) == FIELD_DECL)
5006 {
5007 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5008 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5009 DECL_QUALIFIER (decl) = NULL_TREE;
5010 }
5011
5012 if (TREE_CODE (decl) == FUNCTION_DECL)
5013 {
5014 struct cgraph_node *node;
5015 if (!(node = cgraph_get_node (decl))
5016 || (!node->symbol.definition && !node->clones))
5017 {
5018 if (node)
5019 cgraph_release_function_body (node);
5020 else
5021 {
5022 release_function_body (decl);
5023 DECL_ARGUMENTS (decl) = NULL;
5024 DECL_RESULT (decl) = NULL;
5025 DECL_INITIAL (decl) = error_mark_node;
5026 }
5027 }
5028 if (gimple_has_body_p (decl))
5029 {
5030 tree t;
5031
5032 /* If DECL has a gimple body, then the context for its
5033 arguments must be DECL. Otherwise, it doesn't really
5034 matter, as we will not be emitting any code for DECL. In
5035 general, there may be other instances of DECL created by
5036 the front end and since PARM_DECLs are generally shared,
5037 their DECL_CONTEXT changes as the replicas of DECL are
5038 created. The only time where DECL_CONTEXT is important
5039 is for the FUNCTION_DECLs that have a gimple body (since
5040 the PARM_DECL will be used in the function's body). */
5041 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5042 DECL_CONTEXT (t) = decl;
5043 }
5044
5045 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5046 At this point, it is not needed anymore. */
5047 DECL_SAVED_TREE (decl) = NULL_TREE;
5048
5049 /* Clear the abstract origin if it refers to a method. Otherwise
5050 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5051 origin will not be output correctly. */
5052 if (DECL_ABSTRACT_ORIGIN (decl)
5053 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5054 && RECORD_OR_UNION_TYPE_P
5055 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5056 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5057
5058 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5059 DECL_VINDEX referring to itself into a vtable slot number as it
5060 should. Happens with functions that are copied and then forgotten
5061 about. Just clear it, it won't matter anymore. */
5062 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5063 DECL_VINDEX (decl) = NULL_TREE;
5064 }
5065 else if (TREE_CODE (decl) == VAR_DECL)
5066 {
5067 if ((DECL_EXTERNAL (decl)
5068 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5069 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5070 DECL_INITIAL (decl) = NULL_TREE;
5071 }
5072 else if (TREE_CODE (decl) == TYPE_DECL
5073 || TREE_CODE (decl) == FIELD_DECL)
5074 DECL_INITIAL (decl) = NULL_TREE;
5075 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5076 && DECL_INITIAL (decl)
5077 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5078 {
5079 /* Strip builtins from the translation-unit BLOCK. We still have targets
5080 without builtin_decl_explicit support, and builtins are shared
5081 nodes, so we can't use TREE_CHAIN in multiple lists. */
5082 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5083 while (*nextp)
5084 {
5085 tree var = *nextp;
5086 if (TREE_CODE (var) == FUNCTION_DECL
5087 && DECL_BUILT_IN (var))
5088 *nextp = TREE_CHAIN (var);
5089 else
5090 nextp = &TREE_CHAIN (var);
5091 }
5092 }
5093 }
5094
5095
5096 /* Data used when collecting DECLs and TYPEs for language data removal. */
5097
5098 struct free_lang_data_d
5099 {
5100 /* Worklist to avoid excessive recursion. */
5101 vec<tree> worklist;
5102
5103 /* Set of traversed objects. Used to avoid duplicate visits. */
5104 struct pointer_set_t *pset;
5105
5106 /* Array of symbols to process with free_lang_data_in_decl. */
5107 vec<tree> decls;
5108
5109 /* Array of types to process with free_lang_data_in_type. */
5110 vec<tree> types;
5111 };
5112
5113
5114 /* Save all language fields needed to generate proper debug information
5115 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5116
5117 static void
5118 save_debug_info_for_decl (tree t)
5119 {
5120 /*struct saved_debug_info_d *sdi;*/
5121
5122 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5123
5124 /* FIXME. Partial implementation for saving debug info removed. */
5125 }
5126
5127
5128 /* Save all language fields needed to generate proper debug information
5129 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5130
5131 static void
5132 save_debug_info_for_type (tree t)
5133 {
5134 /*struct saved_debug_info_d *sdi;*/
5135
5136 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5137
5138 /* FIXME. Partial implementation for saving debug info removed. */
5139 }
5140
5141
5142 /* Add type or decl T to one of the list of tree nodes that need their
5143 language data removed. The lists are held inside FLD. */
5144
5145 static void
5146 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5147 {
5148 if (DECL_P (t))
5149 {
5150 fld->decls.safe_push (t);
5151 if (debug_info_level > DINFO_LEVEL_TERSE)
5152 save_debug_info_for_decl (t);
5153 }
5154 else if (TYPE_P (t))
5155 {
5156 fld->types.safe_push (t);
5157 if (debug_info_level > DINFO_LEVEL_TERSE)
5158 save_debug_info_for_type (t);
5159 }
5160 else
5161 gcc_unreachable ();
5162 }
5163
5164 /* Push tree node T into FLD->WORKLIST. */
5165
5166 static inline void
5167 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5168 {
5169 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5170 fld->worklist.safe_push ((t));
5171 }
5172
5173
5174 /* Operand callback helper for free_lang_data_in_node. *TP is the
5175 subtree operand being considered. */
5176
5177 static tree
5178 find_decls_types_r (tree *tp, int *ws, void *data)
5179 {
5180 tree t = *tp;
5181 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5182
5183 if (TREE_CODE (t) == TREE_LIST)
5184 return NULL_TREE;
5185
5186 /* Language specific nodes will be removed, so there is no need
5187 to gather anything under them. */
5188 if (is_lang_specific (t))
5189 {
5190 *ws = 0;
5191 return NULL_TREE;
5192 }
5193
5194 if (DECL_P (t))
5195 {
5196 /* Note that walk_tree does not traverse every possible field in
5197 decls, so we have to do our own traversals here. */
5198 add_tree_to_fld_list (t, fld);
5199
5200 fld_worklist_push (DECL_NAME (t), fld);
5201 fld_worklist_push (DECL_CONTEXT (t), fld);
5202 fld_worklist_push (DECL_SIZE (t), fld);
5203 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5204
5205 /* We are going to remove everything under DECL_INITIAL for
5206 TYPE_DECLs. No point walking them. */
5207 if (TREE_CODE (t) != TYPE_DECL)
5208 fld_worklist_push (DECL_INITIAL (t), fld);
5209
5210 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5211 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5212
5213 if (TREE_CODE (t) == FUNCTION_DECL)
5214 {
5215 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5216 fld_worklist_push (DECL_RESULT (t), fld);
5217 }
5218 else if (TREE_CODE (t) == TYPE_DECL)
5219 {
5220 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5221 fld_worklist_push (DECL_VINDEX (t), fld);
5222 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5223 }
5224 else if (TREE_CODE (t) == FIELD_DECL)
5225 {
5226 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5227 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5228 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5229 fld_worklist_push (DECL_FCONTEXT (t), fld);
5230 }
5231 else if (TREE_CODE (t) == VAR_DECL)
5232 {
5233 fld_worklist_push (DECL_SECTION_NAME (t), fld);
5234 fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
5235 }
5236
5237 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5238 && DECL_HAS_VALUE_EXPR_P (t))
5239 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5240
5241 if (TREE_CODE (t) != FIELD_DECL
5242 && TREE_CODE (t) != TYPE_DECL)
5243 fld_worklist_push (TREE_CHAIN (t), fld);
5244 *ws = 0;
5245 }
5246 else if (TYPE_P (t))
5247 {
5248 /* Note that walk_tree does not traverse every possible field in
5249 types, so we have to do our own traversals here. */
5250 add_tree_to_fld_list (t, fld);
5251
5252 if (!RECORD_OR_UNION_TYPE_P (t))
5253 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5254 fld_worklist_push (TYPE_SIZE (t), fld);
5255 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5256 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5257 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5258 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5259 fld_worklist_push (TYPE_NAME (t), fld);
5260 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5261 them and thus do not want to reach unused pointer types
5262 this way. */
5263 if (!POINTER_TYPE_P (t))
5264 fld_worklist_push (TYPE_MINVAL (t), fld);
5265 if (!RECORD_OR_UNION_TYPE_P (t))
5266 fld_worklist_push (TYPE_MAXVAL (t), fld);
5267 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5268 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5269 do not want to reach unused variants this way. */
5270 if (TYPE_CONTEXT (t))
5271 {
5272 tree ctx = TYPE_CONTEXT (t);
5273 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5274 So push that instead. */
5275 while (ctx && TREE_CODE (ctx) == BLOCK)
5276 ctx = BLOCK_SUPERCONTEXT (ctx);
5277 fld_worklist_push (ctx, fld);
5278 }
5279 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5280 want to reach unused types this way. */
5281
5282 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5283 {
5284 unsigned i;
5285 tree tem;
5286 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5287 fld_worklist_push (TREE_TYPE (tem), fld);
5288 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5289 if (tem
5290 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5291 && TREE_CODE (tem) == TREE_LIST)
5292 do
5293 {
5294 fld_worklist_push (TREE_VALUE (tem), fld);
5295 tem = TREE_CHAIN (tem);
5296 }
5297 while (tem);
5298 }
5299 if (RECORD_OR_UNION_TYPE_P (t))
5300 {
5301 tree tem;
5302 /* Push all TYPE_FIELDS; interesting and non-interesting entries
5303 can be interleaved. */
5304 tem = TYPE_FIELDS (t);
5305 while (tem)
5306 {
5307 if (TREE_CODE (tem) == FIELD_DECL
5308 || TREE_CODE (tem) == TYPE_DECL)
5309 fld_worklist_push (tem, fld);
5310 tem = TREE_CHAIN (tem);
5311 }
5312 }
5313
5314 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5315 *ws = 0;
5316 }
5317 else if (TREE_CODE (t) == BLOCK)
5318 {
5319 tree tem;
5320 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5321 fld_worklist_push (tem, fld);
5322 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5323 fld_worklist_push (tem, fld);
5324 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5325 }
5326
5327 if (TREE_CODE (t) != IDENTIFIER_NODE
5328 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5329 fld_worklist_push (TREE_TYPE (t), fld);
5330
5331 return NULL_TREE;
5332 }
5333
5334
5335 /* Find decls and types in T. */
5336
5337 static void
5338 find_decls_types (tree t, struct free_lang_data_d *fld)
5339 {
5340 while (1)
5341 {
5342 if (!pointer_set_contains (fld->pset, t))
5343 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5344 if (fld->worklist.is_empty ())
5345 break;
5346 t = fld->worklist.pop ();
5347 }
5348 }
5349
5350 /* Translate all the types in LIST into the corresponding runtime
5351 types. */
5352
5353 static tree
5354 get_eh_types_for_runtime (tree list)
5355 {
5356 tree head, prev;
5357
5358 if (list == NULL_TREE)
5359 return NULL_TREE;
5360
5361 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5362 prev = head;
5363 list = TREE_CHAIN (list);
5364 while (list)
5365 {
5366 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5367 TREE_CHAIN (prev) = n;
5368 prev = TREE_CHAIN (prev);
5369 list = TREE_CHAIN (list);
5370 }
5371
5372 return head;
5373 }
5374
5375
5376 /* Find decls and types referenced in EH region R and store them in
5377 FLD->DECLS and FLD->TYPES. */
5378
5379 static void
5380 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5381 {
5382 switch (r->type)
5383 {
5384 case ERT_CLEANUP:
5385 break;
5386
5387 case ERT_TRY:
5388 {
5389 eh_catch c;
5390
5391 /* The types referenced in each catch must first be changed to the
5392 EH types used at runtime. This removes references to FE types
5393 in the region. */
5394 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5395 {
5396 c->type_list = get_eh_types_for_runtime (c->type_list);
5397 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5398 }
5399 }
5400 break;
5401
5402 case ERT_ALLOWED_EXCEPTIONS:
5403 r->u.allowed.type_list
5404 = get_eh_types_for_runtime (r->u.allowed.type_list);
5405 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5406 break;
5407
5408 case ERT_MUST_NOT_THROW:
5409 walk_tree (&r->u.must_not_throw.failure_decl,
5410 find_decls_types_r, fld, fld->pset);
5411 break;
5412 }
5413 }
5414
5415
5416 /* Find decls and types referenced in cgraph node N and store them in
5417 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5418 look for *every* kind of DECL and TYPE node reachable from N,
5419 including those embedded inside types and decls (i.e., TYPE_DECLs,
5420 NAMESPACE_DECLs, etc). */
5421
5422 static void
5423 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5424 {
5425 basic_block bb;
5426 struct function *fn;
5427 unsigned ix;
5428 tree t;
5429
5430 find_decls_types (n->symbol.decl, fld);
5431
5432 if (!gimple_has_body_p (n->symbol.decl))
5433 return;
5434
5435 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5436
5437 fn = DECL_STRUCT_FUNCTION (n->symbol.decl);
5438
5439 /* Traverse locals. */
5440 FOR_EACH_LOCAL_DECL (fn, ix, t)
5441 find_decls_types (t, fld);
5442
5443 /* Traverse EH regions in FN. */
5444 {
5445 eh_region r;
5446 FOR_ALL_EH_REGION_FN (r, fn)
5447 find_decls_types_in_eh_region (r, fld);
5448 }
5449
5450 /* Traverse every statement in FN. */
5451 FOR_EACH_BB_FN (bb, fn)
5452 {
5453 gimple_stmt_iterator si;
5454 unsigned i;
5455
5456 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5457 {
5458 gimple phi = gsi_stmt (si);
5459
5460 for (i = 0; i < gimple_phi_num_args (phi); i++)
5461 {
5462 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5463 find_decls_types (*arg_p, fld);
5464 }
5465 }
5466
5467 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5468 {
5469 gimple stmt = gsi_stmt (si);
5470
5471 if (is_gimple_call (stmt))
5472 find_decls_types (gimple_call_fntype (stmt), fld);
5473
5474 for (i = 0; i < gimple_num_ops (stmt); i++)
5475 {
5476 tree arg = gimple_op (stmt, i);
5477 find_decls_types (arg, fld);
5478 }
5479 }
5480 }
5481 }
5482
5483
5484 /* Find decls and types referenced in varpool node N and store them in
5485 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5486 look for *every* kind of DECL and TYPE node reachable from N,
5487 including those embedded inside types and decls (i.e., TYPE_DECLs,
5488 NAMESPACE_DECLs, etc). */
5489
5490 static void
5491 find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
5492 {
5493 find_decls_types (v->symbol.decl, fld);
5494 }
5495
5496 /* If T needs an assembler name, have one created for it. */
5497
5498 void
5499 assign_assembler_name_if_neeeded (tree t)
5500 {
5501 if (need_assembler_name_p (t))
5502 {
5503 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5504 diagnostics that use input_location to show locus
5505 information. The problem here is that, at this point,
5506 input_location is generally anchored to the end of the file
5507 (since the parser is long gone), so we don't have a good
5508 position to pin it to.
5509
5510 To alleviate this problem, this uses the location of T's
5511 declaration. Examples of this are
5512 testsuite/g++.dg/template/cond2.C and
5513 testsuite/g++.dg/template/pr35240.C. */
5514 location_t saved_location = input_location;
5515 input_location = DECL_SOURCE_LOCATION (t);
5516
5517 decl_assembler_name (t);
5518
5519 input_location = saved_location;
5520 }
5521 }
5522
5523
5524 /* Free language specific information for every operand and expression
5525 in every node of the call graph. This process operates in three stages:
5526
5527 1- Every callgraph node and varpool node is traversed looking for
5528 decls and types embedded in them. This is a more exhaustive
5529 search than that done by find_referenced_vars, because it will
5530 also collect individual fields, decls embedded in types, etc.
5531
5532 2- All the decls found are sent to free_lang_data_in_decl.
5533
5534 3- All the types found are sent to free_lang_data_in_type.
5535
5536 The ordering between decls and types is important because
5537 free_lang_data_in_decl sets assembler names, which includes
5538 mangling. So types cannot be freed up until assembler names have
5539 been set up. */
5540
5541 static void
5542 free_lang_data_in_cgraph (void)
5543 {
5544 struct cgraph_node *n;
5545 struct varpool_node *v;
5546 struct free_lang_data_d fld;
5547 tree t;
5548 unsigned i;
5549 alias_pair *p;
5550
5551 /* Initialize sets and arrays to store referenced decls and types. */
5552 fld.pset = pointer_set_create ();
5553 fld.worklist.create (0);
5554 fld.decls.create (100);
5555 fld.types.create (100);
5556
5557 /* Find decls and types in the body of every function in the callgraph. */
5558 FOR_EACH_FUNCTION (n)
5559 find_decls_types_in_node (n, &fld);
5560
5561 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5562 find_decls_types (p->decl, &fld);
5563
5564 /* Find decls and types in every varpool symbol. */
5565 FOR_EACH_VARIABLE (v)
5566 find_decls_types_in_var (v, &fld);
5567
5568 /* Set the assembler name on every decl found. We need to do this
5569 now because free_lang_data_in_decl will invalidate data needed
5570 for mangling. This breaks mangling on interdependent decls. */
5571 FOR_EACH_VEC_ELT (fld.decls, i, t)
5572 assign_assembler_name_if_neeeded (t);
5573
5574 /* Traverse every decl found freeing its language data. */
5575 FOR_EACH_VEC_ELT (fld.decls, i, t)
5576 free_lang_data_in_decl (t);
5577
5578 /* Traverse every type found freeing its language data. */
5579 FOR_EACH_VEC_ELT (fld.types, i, t)
5580 free_lang_data_in_type (t);
5581
5582 pointer_set_destroy (fld.pset);
5583 fld.worklist.release ();
5584 fld.decls.release ();
5585 fld.types.release ();
5586 }
5587
5588
5589 /* Free resources that are used by the front end but are not needed once it is done. */
5590
5591 static unsigned
5592 free_lang_data (void)
5593 {
5594 unsigned i;
5595
5596 /* If we are the LTO frontend we have freed lang-specific data already. */
5597 if (in_lto_p
5598 || !flag_generate_lto)
5599 return 0;
5600
5601 /* Allocate and assign alias sets to the standard integer types
5602 while the slots are still in the way the frontends generated them. */
5603 for (i = 0; i < itk_none; ++i)
5604 if (integer_types[i])
5605 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5606
5607 /* Traverse the IL resetting language specific information for
5608 operands, expressions, etc. */
5609 free_lang_data_in_cgraph ();
5610
5611 /* Create gimple variants for common types. */
5612 ptrdiff_type_node = integer_type_node;
5613 fileptr_type_node = ptr_type_node;
5614
5615 /* Reset some langhooks. Do not reset types_compatible_p, it may
5616 still be used indirectly via the get_alias_set langhook. */
5617 lang_hooks.dwarf_name = lhd_dwarf_name;
5618 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5619 /* We do not want the default decl_assembler_name implementation,
5620 rather if we have fixed everything we want a wrapper around it
5621 asserting that all non-local symbols already got their assembler
5622 name and only produce assembler names for local symbols. Or rather
5623 make sure we never call decl_assembler_name on local symbols and
5624 devise a separate, middle-end private scheme for it. */
5625
5626 /* Reset diagnostic machinery. */
5627 tree_diagnostics_defaults (global_dc);
5628
5629 return 0;
5630 }
5631
5632
5633 namespace {
5634
5635 const pass_data pass_data_ipa_free_lang_data =
5636 {
5637 SIMPLE_IPA_PASS, /* type */
5638 "*free_lang_data", /* name */
5639 OPTGROUP_NONE, /* optinfo_flags */
5640 false, /* has_gate */
5641 true, /* has_execute */
5642 TV_IPA_FREE_LANG_DATA, /* tv_id */
5643 0, /* properties_required */
5644 0, /* properties_provided */
5645 0, /* properties_destroyed */
5646 0, /* todo_flags_start */
5647 0, /* todo_flags_finish */
5648 };
5649
5650 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5651 {
5652 public:
5653 pass_ipa_free_lang_data (gcc::context *ctxt)
5654 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5655 {}
5656
5657 /* opt_pass methods: */
5658 unsigned int execute () { return free_lang_data (); }
5659
5660 }; // class pass_ipa_free_lang_data
5661
5662 } // anon namespace
5663
5664 simple_ipa_opt_pass *
5665 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5666 {
5667 return new pass_ipa_free_lang_data (ctxt);
5668 }
5669
5670 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5671 ATTR_NAME. Also used internally by remove_attribute(). */
5672 bool
5673 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5674 {
5675 size_t ident_len = IDENTIFIER_LENGTH (ident);
5676
5677 if (ident_len == attr_len)
5678 {
5679 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5680 return true;
5681 }
5682 else if (ident_len == attr_len + 4)
5683 {
5684 /* There is the possibility that ATTR is 'text' and IDENT is
5685 '__text__'. */
5686 const char *p = IDENTIFIER_POINTER (ident);
5687 if (p[0] == '_' && p[1] == '_'
5688 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5689 && strncmp (attr_name, p + 2, attr_len) == 0)
5690 return true;
5691 }
5692
5693 return false;
5694 }
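
/* Illustrative sketch (editorial addition): the is_attribute_p wrapper in
   tree.h passes strlen (ATTR_NAME) for ATTR_LEN, so both spellings of an
   attribute name are accepted:

       is_attribute_p ("packed", get_identifier ("packed"))      -> true
       is_attribute_p ("packed", get_identifier ("__packed__"))  -> true
       is_attribute_p ("packed", get_identifier ("aligned"))     -> false

   ATTR_NAME itself must be the bare, un-underscored form; see the assert
   in remove_attribute below.  */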
5695
5696 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5697 of ATTR_NAME, and LIST is not NULL_TREE. */
5698 tree
5699 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5700 {
5701 while (list)
5702 {
5703 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5704
5705 if (ident_len == attr_len)
5706 {
5707 if (!strcmp (attr_name,
5708 IDENTIFIER_POINTER (get_attribute_name (list))))
5709 break;
5710 }
5711 /* TODO: If we made sure that attributes were stored in the
5712 canonical form without '__...__' (i.e., as in 'text' as opposed
5713 to '__text__') then we could avoid the following case. */
5714 else if (ident_len == attr_len + 4)
5715 {
5716 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5717 if (p[0] == '_' && p[1] == '_'
5718 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5719 && strncmp (attr_name, p + 2, attr_len) == 0)
5720 break;
5721 }
5722 list = TREE_CHAIN (list);
5723 }
5724
5725 return list;
5726 }
5727
5728 /* A variant of lookup_attribute() that can be used with an identifier
5729 as the first argument, and where the identifier can be either
5730 'text' or '__text__'.
5731
5732 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5733 return a pointer to the attribute's list element if the attribute
5734 is part of the list, or NULL_TREE if not found. If the attribute
5735 appears more than once, this only returns the first occurrence; the
5736 TREE_CHAIN of the return value should be passed back in if further
5737 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5738 can be in the form 'text' or '__text__'. */
5739 static tree
5740 lookup_ident_attribute (tree attr_identifier, tree list)
5741 {
5742 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5743
5744 while (list)
5745 {
5746 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5747 == IDENTIFIER_NODE);
5748
5749 /* Identifiers can be compared directly for equality. */
5750 if (attr_identifier == get_attribute_name (list))
5751 break;
5752
5753 /* If they are not equal, one may still be in the form
5754 'text' while the other is in the form '__text__'. TODO:
5755 If we were storing attributes in normalized 'text' form, then
5756 this could all go away and we could take full advantage of
5757 the fact that we're comparing identifiers. :-) */
5758 {
5759 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5760 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5761
5762 if (ident_len == attr_len + 4)
5763 {
5764 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5765 const char *q = IDENTIFIER_POINTER (attr_identifier);
5766 if (p[0] == '_' && p[1] == '_'
5767 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5768 && strncmp (q, p + 2, attr_len) == 0)
5769 break;
5770 }
5771 else if (ident_len + 4 == attr_len)
5772 {
5773 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5774 const char *q = IDENTIFIER_POINTER (attr_identifier);
5775 if (q[0] == '_' && q[1] == '_'
5776 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5777 && strncmp (q + 2, p, ident_len) == 0)
5778 break;
5779 }
5780 }
5781 list = TREE_CHAIN (list);
5782 }
5783
5784 return list;
5785 }
5786
5787 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5788 modified list. */
5789
5790 tree
5791 remove_attribute (const char *attr_name, tree list)
5792 {
5793 tree *p;
5794 size_t attr_len = strlen (attr_name);
5795
5796 gcc_checking_assert (attr_name[0] != '_');
5797
5798 for (p = &list; *p; )
5799 {
5800 tree l = *p;
5801 /* TODO: If we were storing attributes in normalized form, here
5802 we could use a simple strcmp(). */
5803 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5804 *p = TREE_CHAIN (l);
5805 else
5806 p = &TREE_CHAIN (l);
5807 }
5808
5809 return list;
5810 }
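
/* Illustrative sketch (editorial addition): remove_attribute returns the
   possibly-shortened list, so callers must store the result back, e.g.

       DECL_ATTRIBUTES (decl)
         = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   This is the pattern merge_dllimport_decl_attributes uses further down
   for "dllimport".  `decl' is a placeholder name.  */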
5811
5812 /* Return an attribute list that is the union of a1 and a2. */
5813
5814 tree
5815 merge_attributes (tree a1, tree a2)
5816 {
5817 tree attributes;
5818
5819 /* Either one unset? Take the set one. */
5820
5821 if ((attributes = a1) == 0)
5822 attributes = a2;
5823
5824 /* One that completely contains the other? Take it. */
5825
5826 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5827 {
5828 if (attribute_list_contained (a2, a1))
5829 attributes = a2;
5830 else
5831 {
5832 /* Pick the longest list, and hang on the other list. */
5833
5834 if (list_length (a1) < list_length (a2))
5835 attributes = a2, a2 = a1;
5836
5837 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5838 {
5839 tree a;
5840 for (a = lookup_ident_attribute (get_attribute_name (a2),
5841 attributes);
5842 a != NULL_TREE && !attribute_value_equal (a, a2);
5843 a = lookup_ident_attribute (get_attribute_name (a2),
5844 TREE_CHAIN (a)))
5845 ;
5846 if (a == NULL_TREE)
5847 {
5848 a1 = copy_node (a2);
5849 TREE_CHAIN (a1) = attributes;
5850 attributes = a1;
5851 }
5852 }
5853 }
5854 }
5855 return attributes;
5856 }
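
/* Illustrative sketch (editorial addition): an attribute already present
   in the longer list with an equal value (per attribute_value_equal) is
   not duplicated, so merging

       a1 = { packed }        a2 = { packed, aligned(4) }

   yields a list with one "packed" and one "aligned(4)" entry.  The decl
   and type wrappers below simply feed DECL_ATTRIBUTES or TYPE_ATTRIBUTES
   of their operands into this function.  */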
5857
5858 /* Given types T1 and T2, merge their attributes and return
5859 the result. */
5860
5861 tree
5862 merge_type_attributes (tree t1, tree t2)
5863 {
5864 return merge_attributes (TYPE_ATTRIBUTES (t1),
5865 TYPE_ATTRIBUTES (t2));
5866 }
5867
5868 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5869 the result. */
5870
5871 tree
5872 merge_decl_attributes (tree olddecl, tree newdecl)
5873 {
5874 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5875 DECL_ATTRIBUTES (newdecl));
5876 }
5877
5878 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5879
5880 /* Specialization of merge_decl_attributes for various Windows targets.
5881
5882 This handles the following situation:
5883
5884 __declspec (dllimport) int foo;
5885 int foo;
5886
5887 The second instance of `foo' nullifies the dllimport. */
5888
5889 tree
5890 merge_dllimport_decl_attributes (tree old, tree new_tree)
5891 {
5892 tree a;
5893 int delete_dllimport_p = 1;
5894
5895 /* What we need to do here is remove from `old' dllimport if it doesn't
5896 appear in `new'. dllimport behaves like extern: if a declaration is
5897 marked dllimport and a definition appears later, then the object
5898 is not dllimport'd. We also remove a `new' dllimport if the old list
5899 contains dllexport: dllexport always overrides dllimport, regardless
5900 of the order of declaration. */
5901 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5902 delete_dllimport_p = 0;
5903 else if (DECL_DLLIMPORT_P (new_tree)
5904 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5905 {
5906 DECL_DLLIMPORT_P (new_tree) = 0;
5907 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5908 "dllimport ignored", new_tree);
5909 }
5910 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5911 {
5912 /* Warn about overriding a symbol that has already been used, e.g.:
5913 extern int __attribute__ ((dllimport)) foo;
5914 int* bar () {return &foo;}
5915 int foo;
5916 */
5917 if (TREE_USED (old))
5918 {
5919 warning (0, "%q+D redeclared without dllimport attribute "
5920 "after being referenced with dll linkage", new_tree);
5921 /* If we have used a variable's address with dllimport linkage,
5922 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5923 decl may already have had TREE_CONSTANT computed.
5924 We still remove the attribute so that assembler code refers
5925 to '&foo' rather than '_imp__foo'. */
5926 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5927 DECL_DLLIMPORT_P (new_tree) = 1;
5928 }
5929
5930 /* Let an inline definition silently override the external reference,
5931 but otherwise warn about attribute inconsistency. */
5932 else if (TREE_CODE (new_tree) == VAR_DECL
5933 || !DECL_DECLARED_INLINE_P (new_tree))
5934 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
5935 "previous dllimport ignored", new_tree);
5936 }
5937 else
5938 delete_dllimport_p = 0;
5939
5940 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
5941
5942 if (delete_dllimport_p)
5943 a = remove_attribute ("dllimport", a);
5944
5945 return a;
5946 }
5947
5948 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
5949 struct attribute_spec.handler. */
5950
5951 tree
5952 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
5953 bool *no_add_attrs)
5954 {
5955 tree node = *pnode;
5956 bool is_dllimport;
5957
5958 /* These attributes may apply to structure and union types being created,
5959 but otherwise should pass to the declaration involved. */
5960 if (!DECL_P (node))
5961 {
5962 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
5963 | (int) ATTR_FLAG_ARRAY_NEXT))
5964 {
5965 *no_add_attrs = true;
5966 return tree_cons (name, args, NULL_TREE);
5967 }
5968 if (TREE_CODE (node) == RECORD_TYPE
5969 || TREE_CODE (node) == UNION_TYPE)
5970 {
5971 node = TYPE_NAME (node);
5972 if (!node)
5973 return NULL_TREE;
5974 }
5975 else
5976 {
5977 warning (OPT_Wattributes, "%qE attribute ignored",
5978 name);
5979 *no_add_attrs = true;
5980 return NULL_TREE;
5981 }
5982 }
5983
5984 if (TREE_CODE (node) != FUNCTION_DECL
5985 && TREE_CODE (node) != VAR_DECL
5986 && TREE_CODE (node) != TYPE_DECL)
5987 {
5988 *no_add_attrs = true;
5989 warning (OPT_Wattributes, "%qE attribute ignored",
5990 name);
5991 return NULL_TREE;
5992 }
5993
5994 if (TREE_CODE (node) == TYPE_DECL
5995 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
5996 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
5997 {
5998 *no_add_attrs = true;
5999 warning (OPT_Wattributes, "%qE attribute ignored",
6000 name);
6001 return NULL_TREE;
6002 }
6003
6004 is_dllimport = is_attribute_p ("dllimport", name);
6005
6006 /* Report dllimport ambiguities now, before they cause
6007 any damage. */
6008 if (is_dllimport)
6009 {
6010 /* Honor any target-specific overrides. */
6011 if (!targetm.valid_dllimport_attribute_p (node))
6012 *no_add_attrs = true;
6013
6014 else if (TREE_CODE (node) == FUNCTION_DECL
6015 && DECL_DECLARED_INLINE_P (node))
6016 {
6017 warning (OPT_Wattributes, "inline function %q+D declared as "
6018 " dllimport: attribute ignored", node);
6019 *no_add_attrs = true;
6020 }
6021 /* Like MS, treat definition of dllimported variables and
6022 non-inlined functions on declaration as syntax errors. */
6023 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6024 {
6025 error ("function %q+D definition is marked dllimport", node);
6026 *no_add_attrs = true;
6027 }
6028
6029 else if (TREE_CODE (node) == VAR_DECL)
6030 {
6031 if (DECL_INITIAL (node))
6032 {
6033 error ("variable %q+D definition is marked dllimport",
6034 node);
6035 *no_add_attrs = true;
6036 }
6037
6038 /* `extern' needn't be specified with dllimport.
6039 Specify `extern' now and hope for the best. Sigh. */
6040 DECL_EXTERNAL (node) = 1;
6041 /* Also, implicitly give global scope to dllimport'd variables
6042 declared within a function, unless they are declared static. */
6043 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6044 TREE_PUBLIC (node) = 1;
6045 }
6046
6047 if (*no_add_attrs == false)
6048 DECL_DLLIMPORT_P (node) = 1;
6049 }
6050 else if (TREE_CODE (node) == FUNCTION_DECL
6051 && DECL_DECLARED_INLINE_P (node)
6052 && flag_keep_inline_dllexport)
6053 /* An exported function, even if inline, must be emitted. */
6054 DECL_EXTERNAL (node) = 0;
6055
6056 /* Report error if symbol is not accessible at global scope. */
6057 if (!TREE_PUBLIC (node)
6058 && (TREE_CODE (node) == VAR_DECL
6059 || TREE_CODE (node) == FUNCTION_DECL))
6060 {
6061 error ("external linkage required for symbol %q+D because of "
6062 "%qE attribute", node, name);
6063 *no_add_attrs = true;
6064 }
6065
6066 /* A dllexport'd entity must have default visibility so that other
6067 program units (shared libraries or the main executable) can see
6068 it. A dllimport'd entity must have default visibility so that
6069 the linker knows that undefined references within this program
6070 unit can be resolved by the dynamic linker. */
6071 if (!*no_add_attrs)
6072 {
6073 if (DECL_VISIBILITY_SPECIFIED (node)
6074 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6075 error ("%qE implies default visibility, but %qD has already "
6076 "been declared with a different visibility",
6077 name, node);
6078 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6079 DECL_VISIBILITY_SPECIFIED (node) = 1;
6080 }
6081
6082 return NULL_TREE;
6083 }
6084
6085 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6086 \f
6087 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6088 of the various TYPE_QUAL values. */
6089
6090 static void
6091 set_type_quals (tree type, int type_quals)
6092 {
6093 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6094 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6095 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6096 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6097 }
6098
6099 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6100
6101 bool
6102 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6103 {
6104 return (TYPE_QUALS (cand) == type_quals
6105 && TYPE_NAME (cand) == TYPE_NAME (base)
6106 /* Apparently this is needed for Objective-C. */
6107 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6108 /* Check alignment. */
6109 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6110 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6111 TYPE_ATTRIBUTES (base)));
6112 }
6113
6114 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6115
6116 static bool
6117 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6118 {
6119 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6120 && TYPE_NAME (cand) == TYPE_NAME (base)
6121 /* Apparently this is needed for Objective-C. */
6122 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6123 /* Check alignment. */
6124 && TYPE_ALIGN (cand) == align
6125 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6126 TYPE_ATTRIBUTES (base)));
6127 }
6128
6129 /* Return a version of the TYPE, qualified as indicated by the
6130 TYPE_QUALS, if one exists. If no qualified version exists yet,
6131 return NULL_TREE. */
6132
6133 tree
6134 get_qualified_type (tree type, int type_quals)
6135 {
6136 tree t;
6137
6138 if (TYPE_QUALS (type) == type_quals)
6139 return type;
6140
6141 /* Search the chain of variants to see if there is already one there just
6142 like the one we need to have. If so, use that existing one. We must
6143 preserve the TYPE_NAME, since there is code that depends on this. */
6144 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6145 if (check_qualified_type (t, type, type_quals))
6146 return t;
6147
6148 return NULL_TREE;
6149 }
6150
6151 /* Like get_qualified_type, but creates the type if it does not
6152 exist. This function never returns NULL_TREE. */
6153
6154 tree
6155 build_qualified_type (tree type, int type_quals)
6156 {
6157 tree t;
6158
6159 /* See if we already have the appropriate qualified variant. */
6160 t = get_qualified_type (type, type_quals);
6161
6162 /* If not, build it. */
6163 if (!t)
6164 {
6165 t = build_variant_type_copy (type);
6166 set_type_quals (t, type_quals);
6167
6168 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6169 /* Propagate structural equality. */
6170 SET_TYPE_STRUCTURAL_EQUALITY (t);
6171 else if (TYPE_CANONICAL (type) != type)
6172 /* Build the underlying canonical type, since it is different
6173 from TYPE. */
6174 TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
6175 type_quals);
6176 else
6177 /* T is its own canonical type. */
6178 TYPE_CANONICAL (t) = t;
6179
6180 }
6181
6182 return t;
6183 }
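
/* As an illustration, a front end needing a "const volatile" variant of an
   existing type TYPE could write something like:

     tree cv_variant
       = build_qualified_type (type, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   An existing variant with those qualifiers is reused from TYPE's variant
   chain; otherwise one is created via build_variant_type_copy.  */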
6184
6185 /* Create a variant of TYPE with alignment ALIGN. */
6186
6187 tree
6188 build_aligned_type (tree type, unsigned int align)
6189 {
6190 tree t;
6191
6192 if (TYPE_PACKED (type)
6193 || TYPE_ALIGN (type) == align)
6194 return type;
6195
6196 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6197 if (check_aligned_type (t, type, align))
6198 return t;
6199
6200 t = build_variant_type_copy (type);
6201 TYPE_ALIGN (t) = align;
6202
6203 return t;
6204 }
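
/* For example, a variant of TYPE with 128-bit alignment might be requested
   as:

     tree aligned = build_aligned_type (type, 128);

   ALIGN uses the same units as TYPE_ALIGN (bits); packed types and types
   that already have the requested alignment are returned unchanged.  */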
6205
6206 /* Create a new distinct copy of TYPE. The new type is made its own
6207 MAIN_VARIANT. If TYPE requires structural equality checks, the
6208 resulting type requires structural equality checks; otherwise, its
6209 TYPE_CANONICAL points to itself. */
6210
6211 tree
6212 build_distinct_type_copy (tree type)
6213 {
6214 tree t = copy_node (type);
6215
6216 TYPE_POINTER_TO (t) = 0;
6217 TYPE_REFERENCE_TO (t) = 0;
6218
6219 /* Set the canonical type either to a new equivalence class, or
6220 propagate the need for structural equality checks. */
6221 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6222 SET_TYPE_STRUCTURAL_EQUALITY (t);
6223 else
6224 TYPE_CANONICAL (t) = t;
6225
6226 /* Make it its own variant. */
6227 TYPE_MAIN_VARIANT (t) = t;
6228 TYPE_NEXT_VARIANT (t) = 0;
6229
6230 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6231 whose TREE_TYPE is not t. This can also happen in the Ada
6232 frontend when using subtypes. */
6233
6234 return t;
6235 }
6236
6237 /* Create a new variant of TYPE, equivalent but distinct. This is so
6238 the caller can modify it. TYPE_CANONICAL for the return type will
6239 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6240 are considered equal by the language itself (or that both types
6241 require structural equality checks). */
6242
6243 tree
6244 build_variant_type_copy (tree type)
6245 {
6246 tree t, m = TYPE_MAIN_VARIANT (type);
6247
6248 t = build_distinct_type_copy (type);
6249
6250 /* Since we're building a variant, assume that it is a non-semantic
6251 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6252 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6253
6254 /* Add the new type to the chain of variants of TYPE. */
6255 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6256 TYPE_NEXT_VARIANT (m) = t;
6257 TYPE_MAIN_VARIANT (t) = m;
6258
6259 return t;
6260 }
6261 \f
6262 /* Return true if the from trees in the two tree maps are equal. */
6263
6264 int
6265 tree_map_base_eq (const void *va, const void *vb)
6266 {
6267 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6268 *const b = (const struct tree_map_base *) vb;
6269 return (a->from == b->from);
6270 }
6271
6272 /* Hash a from tree in a tree_map_base. */
6273
6274 unsigned int
6275 tree_map_base_hash (const void *item)
6276 {
6277 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6278 }
6279
6280 /* Return true if this tree map structure is marked for garbage collection
6281 purposes. We simply return true if the from tree is marked, so that this
6282 structure goes away when the from tree goes away. */
6283
6284 int
6285 tree_map_base_marked_p (const void *p)
6286 {
6287 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6288 }
6289
6290 /* Hash a from tree in a tree_map. */
6291
6292 unsigned int
6293 tree_map_hash (const void *item)
6294 {
6295 return (((const struct tree_map *) item)->hash);
6296 }
6297
6298 /* Hash a from tree in a tree_decl_map. */
6299
6300 unsigned int
6301 tree_decl_map_hash (const void *item)
6302 {
6303 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6304 }
6305
6306 /* Return the initialization priority for DECL. */
6307
6308 priority_type
6309 decl_init_priority_lookup (tree decl)
6310 {
6311 struct tree_priority_map *h;
6312 struct tree_map_base in;
6313
6314 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6315 in.from = decl;
6316 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6317 return h ? h->init : DEFAULT_INIT_PRIORITY;
6318 }
6319
6320 /* Return the finalization priority for DECL. */
6321
6322 priority_type
6323 decl_fini_priority_lookup (tree decl)
6324 {
6325 struct tree_priority_map *h;
6326 struct tree_map_base in;
6327
6328 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6329 in.from = decl;
6330 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6331 return h ? h->fini : DEFAULT_INIT_PRIORITY;
6332 }
6333
6334 /* Return the initialization and finalization priority information for
6335 DECL. If there is no previous priority information, a freshly
6336 allocated structure is returned. */
6337
6338 static struct tree_priority_map *
6339 decl_priority_info (tree decl)
6340 {
6341 struct tree_priority_map in;
6342 struct tree_priority_map *h;
6343 void **loc;
6344
6345 in.base.from = decl;
6346 loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
6347 h = (struct tree_priority_map *) *loc;
6348 if (!h)
6349 {
6350 h = ggc_alloc_cleared_tree_priority_map ();
6351 *loc = h;
6352 h->base.from = decl;
6353 h->init = DEFAULT_INIT_PRIORITY;
6354 h->fini = DEFAULT_INIT_PRIORITY;
6355 }
6356
6357 return h;
6358 }
6359
6360 /* Set the initialization priority for DECL to PRIORITY. */
6361
6362 void
6363 decl_init_priority_insert (tree decl, priority_type priority)
6364 {
6365 struct tree_priority_map *h;
6366
6367 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6368 if (priority == DEFAULT_INIT_PRIORITY)
6369 return;
6370 h = decl_priority_info (decl);
6371 h->init = priority;
6372 }
6373
6374 /* Set the finalization priority for DECL to PRIORITY. */
6375
6376 void
6377 decl_fini_priority_insert (tree decl, priority_type priority)
6378 {
6379 struct tree_priority_map *h;
6380
6381 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6382 if (priority == DEFAULT_INIT_PRIORITY)
6383 return;
6384 h = decl_priority_info (decl);
6385 h->fini = priority;
6386 }
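
/* For example, a front end handling __attribute__ ((constructor (200))) on
   a function whose decl is FNDECL would typically record the priority as:

     decl_init_priority_insert (fndecl, 200);

   Priorities equal to DEFAULT_INIT_PRIORITY are not recorded at all, since
   the lookup functions fall back to that value anyway.  */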
6387
6388 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6389
6390 static void
6391 print_debug_expr_statistics (void)
6392 {
6393 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6394 (long) htab_size (debug_expr_for_decl),
6395 (long) htab_elements (debug_expr_for_decl),
6396 htab_collisions (debug_expr_for_decl));
6397 }
6398
6399 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6400
6401 static void
6402 print_value_expr_statistics (void)
6403 {
6404 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6405 (long) htab_size (value_expr_for_decl),
6406 (long) htab_elements (value_expr_for_decl),
6407 htab_collisions (value_expr_for_decl));
6408 }
6409
6410 /* Lookup a debug expression for FROM, and return it if we find one. */
6411
6412 tree
6413 decl_debug_expr_lookup (tree from)
6414 {
6415 struct tree_decl_map *h, in;
6416 in.base.from = from;
6417
6418 h = (struct tree_decl_map *)
6419 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6420 if (h)
6421 return h->to;
6422 return NULL_TREE;
6423 }
6424
6425 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6426
6427 void
6428 decl_debug_expr_insert (tree from, tree to)
6429 {
6430 struct tree_decl_map *h;
6431 void **loc;
6432
6433 h = ggc_alloc_tree_decl_map ();
6434 h->base.from = from;
6435 h->to = to;
6436 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6437 INSERT);
6438 *(struct tree_decl_map **) loc = h;
6439 }
6440
6441 /* Lookup a value expression for FROM, and return it if we find one. */
6442
6443 tree
6444 decl_value_expr_lookup (tree from)
6445 {
6446 struct tree_decl_map *h, in;
6447 in.base.from = from;
6448
6449 h = (struct tree_decl_map *)
6450 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6451 if (h)
6452 return h->to;
6453 return NULL_TREE;
6454 }
6455
6456 /* Insert a mapping FROM->TO in the value expression hashtable. */
6457
6458 void
6459 decl_value_expr_insert (tree from, tree to)
6460 {
6461 struct tree_decl_map *h;
6462 void **loc;
6463
6464 h = ggc_alloc_tree_decl_map ();
6465 h->base.from = from;
6466 h->to = to;
6467 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6468 INSERT);
6469 *(struct tree_decl_map **) loc = h;
6470 }
6471
6472 /* Lookup a vector of debug arguments for FROM, and return it if we
6473 find one. */
6474
6475 vec<tree, va_gc> **
6476 decl_debug_args_lookup (tree from)
6477 {
6478 struct tree_vec_map *h, in;
6479
6480 if (!DECL_HAS_DEBUG_ARGS_P (from))
6481 return NULL;
6482 gcc_checking_assert (debug_args_for_decl != NULL);
6483 in.base.from = from;
6484 h = (struct tree_vec_map *)
6485 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6486 if (h)
6487 return &h->to;
6488 return NULL;
6489 }
6490
6491 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6492 arguments hashtable. */
6493
6494 vec<tree, va_gc> **
6495 decl_debug_args_insert (tree from)
6496 {
6497 struct tree_vec_map *h;
6498 void **loc;
6499
6500 if (DECL_HAS_DEBUG_ARGS_P (from))
6501 return decl_debug_args_lookup (from);
6502 if (debug_args_for_decl == NULL)
6503 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6504 tree_vec_map_eq, 0);
6505 h = ggc_alloc_tree_vec_map ();
6506 h->base.from = from;
6507 h->to = NULL;
6508 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6509 INSERT);
6510 *(struct tree_vec_map **) loc = h;
6511 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6512 return &h->to;
6513 }
6514
6515 /* Hashing of types so that we don't make duplicates.
6516 The entry point is `type_hash_canon'. */
6517
6518 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6519 with types in the TREE_VALUE slots), by adding the hash codes
6520 of the individual types. */
6521
6522 static unsigned int
6523 type_hash_list (const_tree list, hashval_t hashcode)
6524 {
6525 const_tree tail;
6526
6527 for (tail = list; tail; tail = TREE_CHAIN (tail))
6528 if (TREE_VALUE (tail) != error_mark_node)
6529 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6530 hashcode);
6531
6532 return hashcode;
6533 }
6534
6535 /* These are the Hashtable callback functions. */
6536
6537 /* Returns true iff the types are equivalent. */
6538
6539 static int
6540 type_hash_eq (const void *va, const void *vb)
6541 {
6542 const struct type_hash *const a = (const struct type_hash *) va,
6543 *const b = (const struct type_hash *) vb;
6544
6545 /* First test the things that are the same for all types. */
6546 if (a->hash != b->hash
6547 || TREE_CODE (a->type) != TREE_CODE (b->type)
6548 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6549 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6550 TYPE_ATTRIBUTES (b->type))
6551 || (TREE_CODE (a->type) != COMPLEX_TYPE
6552 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6553 return 0;
6554
6555 /* Be careful about comparing arrays before and after the element type
6556 has been completed; don't compare TYPE_ALIGN unless both types are
6557 complete. */
6558 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6559 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6560 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6561 return 0;
6562
6563 switch (TREE_CODE (a->type))
6564 {
6565 case VOID_TYPE:
6566 case COMPLEX_TYPE:
6567 case POINTER_TYPE:
6568 case REFERENCE_TYPE:
6569 case NULLPTR_TYPE:
6570 return 1;
6571
6572 case VECTOR_TYPE:
6573 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6574
6575 case ENUMERAL_TYPE:
6576 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6577 && !(TYPE_VALUES (a->type)
6578 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6579 && TYPE_VALUES (b->type)
6580 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6581 && type_list_equal (TYPE_VALUES (a->type),
6582 TYPE_VALUES (b->type))))
6583 return 0;
6584
6585 /* ... fall through ... */
6586
6587 case INTEGER_TYPE:
6588 case REAL_TYPE:
6589 case BOOLEAN_TYPE:
6590 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6591 return false;
6592 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6593 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6594 TYPE_MAX_VALUE (b->type)))
6595 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6596 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6597 TYPE_MIN_VALUE (b->type))));
6598
6599 case FIXED_POINT_TYPE:
6600 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6601
6602 case OFFSET_TYPE:
6603 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6604
6605 case METHOD_TYPE:
6606 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6607 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6608 || (TYPE_ARG_TYPES (a->type)
6609 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6610 && TYPE_ARG_TYPES (b->type)
6611 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6612 && type_list_equal (TYPE_ARG_TYPES (a->type),
6613 TYPE_ARG_TYPES (b->type)))))
6614 break;
6615 return 0;
6616 case ARRAY_TYPE:
6617 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6618
6619 case RECORD_TYPE:
6620 case UNION_TYPE:
6621 case QUAL_UNION_TYPE:
6622 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6623 || (TYPE_FIELDS (a->type)
6624 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6625 && TYPE_FIELDS (b->type)
6626 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6627 && type_list_equal (TYPE_FIELDS (a->type),
6628 TYPE_FIELDS (b->type))));
6629
6630 case FUNCTION_TYPE:
6631 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6632 || (TYPE_ARG_TYPES (a->type)
6633 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6634 && TYPE_ARG_TYPES (b->type)
6635 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6636 && type_list_equal (TYPE_ARG_TYPES (a->type),
6637 TYPE_ARG_TYPES (b->type))))
6638 break;
6639 return 0;
6640
6641 default:
6642 return 0;
6643 }
6644
6645 if (lang_hooks.types.type_hash_eq != NULL)
6646 return lang_hooks.types.type_hash_eq (a->type, b->type);
6647
6648 return 1;
6649 }
6650
6651 /* Return the cached hash value. */
6652
6653 static hashval_t
6654 type_hash_hash (const void *item)
6655 {
6656 return ((const struct type_hash *) item)->hash;
6657 }
6658
6659 /* Look in the type hash table for a type isomorphic to TYPE.
6660 If one is found, return it. Otherwise return 0. */
6661
6662 static tree
6663 type_hash_lookup (hashval_t hashcode, tree type)
6664 {
6665 struct type_hash *h, in;
6666
6667 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6668 must call that routine before comparing TYPE_ALIGNs. */
6669 layout_type (type);
6670
6671 in.hash = hashcode;
6672 in.type = type;
6673
6674 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6675 hashcode);
6676 if (h)
6677 return h->type;
6678 return NULL_TREE;
6679 }
6680
6681 /* Add an entry to the type-hash-table
6682 for a type TYPE whose hash code is HASHCODE. */
6683
6684 static void
6685 type_hash_add (hashval_t hashcode, tree type)
6686 {
6687 struct type_hash *h;
6688 void **loc;
6689
6690 h = ggc_alloc_type_hash ();
6691 h->hash = hashcode;
6692 h->type = type;
6693 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6694 *loc = (void *)h;
6695 }
6696
6697 /* Given TYPE, and HASHCODE its hash code, return the canonical
6698 object for an identical type if one already exists.
6699 Otherwise, return TYPE, and record it as the canonical object.
6700
6701 To use this function, first create a type of the sort you want.
6702 Then compute its hash code from the fields of the type that
6703 make it different from other similar types.
6704 Then call this function and use the value. */
6705
6706 tree
6707 type_hash_canon (unsigned int hashcode, tree type)
6708 {
6709 tree t1;
6710
6711 /* The hash table only contains main variants, so ensure that's what we're
6712 being passed. */
6713 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6714
6715 /* See if the type is in the hash table already. If so, return it.
6716 Otherwise, add the type. */
6717 t1 = type_hash_lookup (hashcode, type);
6718 if (t1 != 0)
6719 {
6720 if (GATHER_STATISTICS)
6721 {
6722 tree_code_counts[(int) TREE_CODE (type)]--;
6723 tree_node_counts[(int) t_kind]--;
6724 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6725 }
6726 return t1;
6727 }
6728 else
6729 {
6730 type_hash_add (hashcode, type);
6731 return type;
6732 }
6733 }
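
/* A sketch of the usual pattern, as used by the array and integer type
   constructors later in this file:

     tree t = make_node (ARRAY_TYPE);
     ... fill in the fields that distinguish the new type ...
     hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
     t = type_hash_canon (hashcode, t);

   If an isomorphic type is already registered, the freshly built node is
   dropped (modulo the statistics adjustment above) and the existing one is
   returned instead.  */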
6734
6735 /* See if the data pointed to by the type hash table is marked. We consider
6736 it marked if the type is marked or if a debug type number or symbol
6737 table entry has been made for the type. */
6738
6739 static int
6740 type_hash_marked_p (const void *p)
6741 {
6742 const_tree const type = ((const struct type_hash *) p)->type;
6743
6744 return ggc_marked_p (type);
6745 }
6746
6747 static void
6748 print_type_hash_statistics (void)
6749 {
6750 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6751 (long) htab_size (type_hash_table),
6752 (long) htab_elements (type_hash_table),
6753 htab_collisions (type_hash_table));
6754 }
6755
6756 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6757 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6758 by adding the hash codes of the individual attributes. */
6759
6760 static unsigned int
6761 attribute_hash_list (const_tree list, hashval_t hashcode)
6762 {
6763 const_tree tail;
6764
6765 for (tail = list; tail; tail = TREE_CHAIN (tail))
6766 /* ??? Do we want to add in TREE_VALUE too? */
6767 hashcode = iterative_hash_object
6768 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6769 return hashcode;
6770 }
6771
6772 /* Given two lists of attributes, return true if list l2 is
6773 equivalent to l1. */
6774
6775 int
6776 attribute_list_equal (const_tree l1, const_tree l2)
6777 {
6778 if (l1 == l2)
6779 return 1;
6780
6781 return attribute_list_contained (l1, l2)
6782 && attribute_list_contained (l2, l1);
6783 }
6784
6785 /* Given two lists of attributes, return true if list L2 is
6786 completely contained within L1. */
6787 /* ??? This would be faster if attribute names were stored in a canonicalized
6788 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6789 must be used to show these elements are equivalent (which they are). */
6790 /* ??? It's not clear that attributes with arguments will always be handled
6791 correctly. */
6792
6793 int
6794 attribute_list_contained (const_tree l1, const_tree l2)
6795 {
6796 const_tree t1, t2;
6797
6798 /* First check the obvious, maybe the lists are identical. */
6799 if (l1 == l2)
6800 return 1;
6801
6802 /* Maybe the lists are similar. */
6803 for (t1 = l1, t2 = l2;
6804 t1 != 0 && t2 != 0
6805 && get_attribute_name (t1) == get_attribute_name (t2)
6806 && TREE_VALUE (t1) == TREE_VALUE (t2);
6807 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6808 ;
6809
6810 /* Maybe the lists are equal. */
6811 if (t1 == 0 && t2 == 0)
6812 return 1;
6813
6814 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6815 {
6816 const_tree attr;
6817 /* This CONST_CAST is okay because lookup_attribute does not
6818 modify its argument and the return value is assigned to a
6819 const_tree. */
6820 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6821 CONST_CAST_TREE (l1));
6822 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6823 attr = lookup_ident_attribute (get_attribute_name (t2),
6824 TREE_CHAIN (attr)))
6825 ;
6826
6827 if (attr == NULL_TREE)
6828 return 0;
6829 }
6830
6831 return 1;
6832 }
6833
6834 /* Given two lists of types
6835 (chains of TREE_LIST nodes with types in the TREE_VALUE slots),
6836 return 1 if the lists contain the same types in the same order.
6837 Also, the TREE_PURPOSEs must match. */
6838
6839 int
6840 type_list_equal (const_tree l1, const_tree l2)
6841 {
6842 const_tree t1, t2;
6843
6844 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6845 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6846 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6847 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6848 && (TREE_TYPE (TREE_PURPOSE (t1))
6849 == TREE_TYPE (TREE_PURPOSE (t2))))))
6850 return 0;
6851
6852 return t1 == t2;
6853 }
6854
6855 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6856 given by TYPE. If the argument list accepts variable arguments,
6857 then this function counts only the ordinary arguments. */
6858
6859 int
6860 type_num_arguments (const_tree type)
6861 {
6862 int i = 0;
6863 tree t;
6864
6865 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6866 /* If the function does not take a variable number of arguments,
6867 the last element in the list will have type `void'. */
6868 if (VOID_TYPE_P (TREE_VALUE (t)))
6869 break;
6870 else
6871 ++i;
6872
6873 return i;
6874 }
6875
6876 /* Nonzero if integer constants T1 and T2
6877 represent the same constant value. */
6878
6879 int
6880 tree_int_cst_equal (const_tree t1, const_tree t2)
6881 {
6882 if (t1 == t2)
6883 return 1;
6884
6885 if (t1 == 0 || t2 == 0)
6886 return 0;
6887
6888 if (TREE_CODE (t1) == INTEGER_CST
6889 && TREE_CODE (t2) == INTEGER_CST
6890 && wi::to_widest (t1) == wi::to_widest (t2))
6891 return 1;
6892
6893 return 0;
6894 }
6895
6896 /* Nonzero if integer constants T1 and T2 represent values that satisfy <.
6897 The precise way of comparison depends on their data type. */
6898
6899 int
6900 tree_int_cst_lt (const_tree t1, const_tree t2)
6901 {
6902 return INT_CST_LT (t1, t2);
6903 }
6904
6905 /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
6906
6907 int
6908 tree_int_cst_compare (const_tree t1, const_tree t2)
6909 {
6910 return wi::cmps (wi::to_widest (t1), wi::to_widest (t2));
6911 }
6912
6913 /* Return the least significant HOST_WIDE_INT bits of T, an INTEGER_CST
6914 of sizetype kind. This makes sure to properly sign-extend the
6915 constant. */
6916
6917 HOST_WIDE_INT
6918 size_low_cst (const_tree t)
6919 {
6920 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
6921 int prec = TYPE_PRECISION (TREE_TYPE (t));
6922 if (prec < HOST_BITS_PER_WIDE_INT)
6923 return sext_hwi (w, prec);
6924 return w;
6925 }
6926
6927 /* Return the most significant (sign) bit of T. */
6928
6929 int
6930 tree_int_cst_sign_bit (const_tree t)
6931 {
6932 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6933
6934 return wi::extract_uhwi (t, bitno, 1);
6935 }
6936
6937 /* Return an indication of the sign of the integer constant T.
6938 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6939 Note that -1 will never be returned if T's type is unsigned. */
6940
6941 int
6942 tree_int_cst_sgn (const_tree t)
6943 {
6944 if (wi::eq_p (t, 0))
6945 return 0;
6946 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6947 return 1;
6948 else if (wi::neg_p (t))
6949 return -1;
6950 else
6951 return 1;
6952 }
6953
6954 /* Return the minimum number of bits needed to represent VALUE in a
6955 signed or unsigned type; SGN says which. */
6956
6957 unsigned int
6958 tree_int_cst_min_precision (tree value, signop sgn)
6959 {
6960 /* If the value is negative, compute its negative minus 1. The latter
6961 adjustment is because the absolute value of the largest negative value
6962 is one larger than the largest positive value. This is equivalent to
6963 a bit-wise negation, so use that operation instead. */
6964
6965 if (tree_int_cst_sgn (value) < 0)
6966 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6967
6968 /* Return the number of bits needed, taking into account the fact
6969 that we need one more bit for a signed than unsigned type.
6970 If value is 0 or -1, the minimum precision is 1 no matter
6971 whether SGN is SIGNED or UNSIGNED. */
6972
6973 if (integer_zerop (value))
6974 return 1;
6975 else
6976 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
6977 }
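
/* Worked examples: for VALUE == 5, tree_floor_log2 returns 2, so the result
   is 3 bits when SGN is UNSIGNED and 4 bits when SGN is SIGNED.  For
   VALUE == -3, the bit-wise negation yields 2, tree_floor_log2 returns 1,
   and the result is 3 bits, which indeed holds -3 in two's complement.  */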
6978
6979 /* Compare two constructor-element-type constants. Return 1 if the lists
6980 are known to be equal; otherwise return 0. */
6981
6982 int
6983 simple_cst_list_equal (const_tree l1, const_tree l2)
6984 {
6985 while (l1 != NULL_TREE && l2 != NULL_TREE)
6986 {
6987 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
6988 return 0;
6989
6990 l1 = TREE_CHAIN (l1);
6991 l2 = TREE_CHAIN (l2);
6992 }
6993
6994 return l1 == l2;
6995 }
6996
6997 /* Return truthvalue of whether T1 is the same tree structure as T2.
6998 Return 1 if they are the same.
6999 Return 0 if they are understandably different.
7000 Return -1 if either contains tree structure not understood by
7001 this function. */
7002
7003 int
7004 simple_cst_equal (const_tree t1, const_tree t2)
7005 {
7006 enum tree_code code1, code2;
7007 int cmp;
7008 int i;
7009
7010 if (t1 == t2)
7011 return 1;
7012 if (t1 == 0 || t2 == 0)
7013 return 0;
7014
7015 code1 = TREE_CODE (t1);
7016 code2 = TREE_CODE (t2);
7017
7018 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7019 {
7020 if (CONVERT_EXPR_CODE_P (code2)
7021 || code2 == NON_LVALUE_EXPR)
7022 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7023 else
7024 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7025 }
7026
7027 else if (CONVERT_EXPR_CODE_P (code2)
7028 || code2 == NON_LVALUE_EXPR)
7029 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7030
7031 if (code1 != code2)
7032 return 0;
7033
7034 switch (code1)
7035 {
7036 case INTEGER_CST:
7037 return wi::to_widest (t1) == wi::to_widest (t2);
7038
7039 case REAL_CST:
7040 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7041
7042 case FIXED_CST:
7043 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7044
7045 case STRING_CST:
7046 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7047 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7048 TREE_STRING_LENGTH (t1)));
7049
7050 case CONSTRUCTOR:
7051 {
7052 unsigned HOST_WIDE_INT idx;
7053 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7054 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7055
7056 if (vec_safe_length (v1) != vec_safe_length (v2))
7057 return false;
7058
7059 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7060 /* ??? Should we also handle fields here? */
7061 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7062 return false;
7063 return true;
7064 }
7065
7066 case SAVE_EXPR:
7067 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7068
7069 case CALL_EXPR:
7070 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7071 if (cmp <= 0)
7072 return cmp;
7073 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7074 return 0;
7075 {
7076 const_tree arg1, arg2;
7077 const_call_expr_arg_iterator iter1, iter2;
7078 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7079 arg2 = first_const_call_expr_arg (t2, &iter2);
7080 arg1 && arg2;
7081 arg1 = next_const_call_expr_arg (&iter1),
7082 arg2 = next_const_call_expr_arg (&iter2))
7083 {
7084 cmp = simple_cst_equal (arg1, arg2);
7085 if (cmp <= 0)
7086 return cmp;
7087 }
7088 return arg1 == arg2;
7089 }
7090
7091 case TARGET_EXPR:
7092 /* Special case: if either target is an unallocated VAR_DECL,
7093 it means that it's going to be unified with whatever the
7094 TARGET_EXPR is really supposed to initialize, so treat it
7095 as being equivalent to anything. */
7096 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7097 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7098 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7099 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7100 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7101 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7102 cmp = 1;
7103 else
7104 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7105
7106 if (cmp <= 0)
7107 return cmp;
7108
7109 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7110
7111 case WITH_CLEANUP_EXPR:
7112 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7113 if (cmp <= 0)
7114 return cmp;
7115
7116 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7117
7118 case COMPONENT_REF:
7119 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7120 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7121
7122 return 0;
7123
7124 case VAR_DECL:
7125 case PARM_DECL:
7126 case CONST_DECL:
7127 case FUNCTION_DECL:
7128 return 0;
7129
7130 default:
7131 break;
7132 }
7133
7134 /* This general rule works for most tree codes. All exceptions should be
7135 handled above. If this is a language-specific tree code, we can't
7136 trust what might be in the operand, so say we don't know
7137 the situation. */
7138 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7139 return -1;
7140
7141 switch (TREE_CODE_CLASS (code1))
7142 {
7143 case tcc_unary:
7144 case tcc_binary:
7145 case tcc_comparison:
7146 case tcc_expression:
7147 case tcc_reference:
7148 case tcc_statement:
7149 cmp = 1;
7150 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7151 {
7152 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7153 if (cmp <= 0)
7154 return cmp;
7155 }
7156
7157 return cmp;
7158
7159 default:
7160 return -1;
7161 }
7162 }
7163
7164 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7165 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7166 than U, respectively. */
7167
7168 int
7169 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7170 {
7171 if (tree_int_cst_sgn (t) < 0)
7172 return -1;
7173 else if (!cst_fits_uhwi_p (t))
7174 return 1;
7175 else if ((unsigned HOST_WIDE_INT) tree_to_hwi (t) == u)
7176 return 0;
7177 else if ((unsigned HOST_WIDE_INT) tree_to_hwi (t) < u)
7178 return -1;
7179 else
7180 return 1;
7181 }
7182
7183 /* Return true if SIZE represents a constant size that is in bounds of
7184 what the middle-end and the backend accept (covering not more than
7185 half of the address-space). */
7186
7187 bool
7188 valid_constant_size_p (const_tree size)
7189 {
7190 if (! tree_fits_uhwi_p (size)
7191 || TREE_OVERFLOW (size)
7192 || tree_int_cst_sign_bit (size) != 0)
7193 return false;
7194 return true;
7195 }
7196
7197 /* Return the precision of the type, or for a complex or vector type the
7198 precision of the type of its elements. */
7199
7200 unsigned int
7201 element_precision (const_tree type)
7202 {
7203 enum tree_code code = TREE_CODE (type);
7204 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7205 type = TREE_TYPE (type);
7206
7207 return TYPE_PRECISION (type);
7208 }
7209
7210 /* Return true if CODE represents an associative tree code. Otherwise
7211 return false. */
7212 bool
7213 associative_tree_code (enum tree_code code)
7214 {
7215 switch (code)
7216 {
7217 case BIT_IOR_EXPR:
7218 case BIT_AND_EXPR:
7219 case BIT_XOR_EXPR:
7220 case PLUS_EXPR:
7221 case MULT_EXPR:
7222 case MIN_EXPR:
7223 case MAX_EXPR:
7224 return true;
7225
7226 default:
7227 break;
7228 }
7229 return false;
7230 }
7231
7232 /* Return true if CODE represents a commutative tree code. Otherwise
7233 return false. */
7234 bool
7235 commutative_tree_code (enum tree_code code)
7236 {
7237 switch (code)
7238 {
7239 case PLUS_EXPR:
7240 case MULT_EXPR:
7241 case MULT_HIGHPART_EXPR:
7242 case MIN_EXPR:
7243 case MAX_EXPR:
7244 case BIT_IOR_EXPR:
7245 case BIT_XOR_EXPR:
7246 case BIT_AND_EXPR:
7247 case NE_EXPR:
7248 case EQ_EXPR:
7249 case UNORDERED_EXPR:
7250 case ORDERED_EXPR:
7251 case UNEQ_EXPR:
7252 case LTGT_EXPR:
7253 case TRUTH_AND_EXPR:
7254 case TRUTH_XOR_EXPR:
7255 case TRUTH_OR_EXPR:
7256 case WIDEN_MULT_EXPR:
7257 case VEC_WIDEN_MULT_HI_EXPR:
7258 case VEC_WIDEN_MULT_LO_EXPR:
7259 case VEC_WIDEN_MULT_EVEN_EXPR:
7260 case VEC_WIDEN_MULT_ODD_EXPR:
7261 return true;
7262
7263 default:
7264 break;
7265 }
7266 return false;
7267 }
7268
7269 /* Return true if CODE represents a ternary tree code for which the
7270 first two operands are commutative. Otherwise return false. */
7271 bool
7272 commutative_ternary_tree_code (enum tree_code code)
7273 {
7274 switch (code)
7275 {
7276 case WIDEN_MULT_PLUS_EXPR:
7277 case WIDEN_MULT_MINUS_EXPR:
7278 return true;
7279
7280 default:
7281 break;
7282 }
7283 return false;
7284 }
7285
7286 /* Generate a hash value for an expression. This can be used iteratively
7287 by passing a previous result as the VAL argument.
7288
7289 This function is intended to produce the same hash for expressions which
7290 would compare equal using operand_equal_p. */
7291
7292 hashval_t
7293 iterative_hash_expr (const_tree t, hashval_t val)
7294 {
7295 int i;
7296 enum tree_code code;
7297 char tclass;
7298
7299 if (t == NULL_TREE)
7300 return iterative_hash_hashval_t (0, val);
7301
7302 code = TREE_CODE (t);
7303
7304 switch (code)
7305 {
7306 /* Alas, constants aren't shared, so we can't rely on pointer
7307 identity. */
7308 case INTEGER_CST:
7309 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7310 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7311 return val;
7312 case REAL_CST:
7313 {
7314 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7315
7316 return iterative_hash_hashval_t (val2, val);
7317 }
7318 case FIXED_CST:
7319 {
7320 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7321
7322 return iterative_hash_hashval_t (val2, val);
7323 }
7324 case STRING_CST:
7325 return iterative_hash (TREE_STRING_POINTER (t),
7326 TREE_STRING_LENGTH (t), val);
7327 case COMPLEX_CST:
7328 val = iterative_hash_expr (TREE_REALPART (t), val);
7329 return iterative_hash_expr (TREE_IMAGPART (t), val);
7330 case VECTOR_CST:
7331 {
7332 unsigned i;
7333 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7334 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7335 return val;
7336 }
7337 case SSA_NAME:
7338 /* We can just compare by pointer. */
7339 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7340 case PLACEHOLDER_EXPR:
7341 /* The node itself doesn't matter. */
7342 return val;
7343 case TREE_LIST:
7344 /* A list of expressions, for a CALL_EXPR or as the elements of a
7345 VECTOR_CST. */
7346 for (; t; t = TREE_CHAIN (t))
7347 val = iterative_hash_expr (TREE_VALUE (t), val);
7348 return val;
7349 case CONSTRUCTOR:
7350 {
7351 unsigned HOST_WIDE_INT idx;
7352 tree field, value;
7353 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7354 {
7355 val = iterative_hash_expr (field, val);
7356 val = iterative_hash_expr (value, val);
7357 }
7358 return val;
7359 }
7360 case FUNCTION_DECL:
7361 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7362 Otherwise nodes that compare equal according to operand_equal_p might
7363 get different hash codes. However, don't do this for machine specific
7364 or front end builtins, since the function code is overloaded in those
7365 cases. */
7366 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7367 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7368 {
7369 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7370 code = TREE_CODE (t);
7371 }
7372 /* FALL THROUGH */
7373 default:
7374 tclass = TREE_CODE_CLASS (code);
7375
7376 if (tclass == tcc_declaration)
7377 {
7378 /* DECLs have a unique ID. */
7379 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7380 }
7381 else
7382 {
7383 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7384
7385 val = iterative_hash_object (code, val);
7386
7387 /* Don't hash the type, that can lead to having nodes which
7388 compare equal according to operand_equal_p, but which
7389 have different hash codes. */
7390 if (CONVERT_EXPR_CODE_P (code)
7391 || code == NON_LVALUE_EXPR)
7392 {
7393 /* Make sure to include signedness in the hash computation. */
7394 val += TYPE_UNSIGNED (TREE_TYPE (t));
7395 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7396 }
7397
7398 else if (commutative_tree_code (code))
7399 {
7400 /* It's a commutative expression. We want to hash it the same
7401 however it appears. We do this by first hashing both operands
7402 and then rehashing based on the order of their independent
7403 hashes. */
7404 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7405 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7406 hashval_t t;
7407
7408 if (one > two)
7409 t = one, one = two, two = t;
7410
7411 val = iterative_hash_hashval_t (one, val);
7412 val = iterative_hash_hashval_t (two, val);
7413 }
7414 else
7415 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7416 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7417 }
7418 return val;
7419 }
7420 }
7421
7422 /* Generate a hash value for a pair of expressions. This can be used
7423 iteratively by passing a previous result as the VAL argument.
7424
7425 The same hash value is always returned for a given pair of expressions,
7426 regardless of the order in which they are presented. This is useful in
7427 hashing the operands of commutative functions. */
7428
7429 hashval_t
7430 iterative_hash_exprs_commutative (const_tree t1,
7431 const_tree t2, hashval_t val)
7432 {
7433 hashval_t one = iterative_hash_expr (t1, 0);
7434 hashval_t two = iterative_hash_expr (t2, 0);
7435 hashval_t t;
7436
7437 if (one > two)
7438 t = one, one = two, two = t;
7439 val = iterative_hash_hashval_t (one, val);
7440 val = iterative_hash_hashval_t (two, val);
7441
7442 return val;
7443 }
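
/* For instance, hashing the operands of a commutative comparison so that
   "a == b" and "b == a" land in the same bucket might look like this, with
   OP0 and OP1 standing for the two operand trees:

     hashval_t h = iterative_hash_exprs_commutative (op0, op1, 0);

   The two operand hashes are ordered before being combined, so the result
   does not depend on argument order.  */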
7444 \f
7445 /* Constructors for pointer, array and function types.
7446 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7447 constructed by language-dependent code, not here.) */
7448
7449 /* Construct, lay out and return the type of pointers to TO_TYPE with
7450 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7451 reference all of memory. If such a type has already been
7452 constructed, reuse it. */
7453
7454 tree
7455 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7456 bool can_alias_all)
7457 {
7458 tree t;
7459
7460 if (to_type == error_mark_node)
7461 return error_mark_node;
7462
7463 /* If the pointed-to type has the may_alias attribute set, force
7464 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7465 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7466 can_alias_all = true;
7467
7468 /* In some cases, languages will have things that aren't a POINTER_TYPE
7469 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7470 In that case, return that type without regard to the rest of our
7471 operands.
7472
7473 ??? This is a kludge, but consistent with the way this function has
7474 always operated and there doesn't seem to be a good way to avoid this
7475 at the moment. */
7476 if (TYPE_POINTER_TO (to_type) != 0
7477 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7478 return TYPE_POINTER_TO (to_type);
7479
7480 /* First, if we already have a type for pointers to TO_TYPE and it's
7481 the proper mode, use it. */
7482 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7483 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7484 return t;
7485
7486 t = make_node (POINTER_TYPE);
7487
7488 TREE_TYPE (t) = to_type;
7489 SET_TYPE_MODE (t, mode);
7490 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7491 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7492 TYPE_POINTER_TO (to_type) = t;
7493
7494 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7495 SET_TYPE_STRUCTURAL_EQUALITY (t);
7496 else if (TYPE_CANONICAL (to_type) != to_type)
7497 TYPE_CANONICAL (t)
7498 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7499 mode, can_alias_all);
7500
7501 /* Lay out the type. This function has many callers that are concerned
7502 with expression-construction, and this simplifies them all. */
7503 layout_type (t);
7504
7505 return t;
7506 }
7507
7508 /* By default build pointers in ptr_mode. */
7509
7510 tree
7511 build_pointer_type (tree to_type)
7512 {
7513 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7514 : TYPE_ADDR_SPACE (to_type);
7515 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7516 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7517 }
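
/* For example, a pointer type to char in the generic address space can be
   obtained with:

     tree pchar_type = build_pointer_type (char_type_node);

   which is equivalent to calling build_pointer_type_for_mode with the
   pointer mode of the pointee's address space and CAN_ALIAS_ALL false.  */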
7518
7519 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7520
7521 tree
7522 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7523 bool can_alias_all)
7524 {
7525 tree t;
7526
7527 if (to_type == error_mark_node)
7528 return error_mark_node;
7529
7530 /* If the pointed-to type has the may_alias attribute set, force
7531 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7532 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7533 can_alias_all = true;
7534
7535 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7536 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7537 In that case, return that type without regard to the rest of our
7538 operands.
7539
7540 ??? This is a kludge, but consistent with the way this function has
7541 always operated and there doesn't seem to be a good way to avoid this
7542 at the moment. */
7543 if (TYPE_REFERENCE_TO (to_type) != 0
7544 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7545 return TYPE_REFERENCE_TO (to_type);
7546
7547 /* First, if we already have a type for pointers to TO_TYPE and it's
7548 the proper mode, use it. */
7549 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7550 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7551 return t;
7552
7553 t = make_node (REFERENCE_TYPE);
7554
7555 TREE_TYPE (t) = to_type;
7556 SET_TYPE_MODE (t, mode);
7557 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7558 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7559 TYPE_REFERENCE_TO (to_type) = t;
7560
7561 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7562 SET_TYPE_STRUCTURAL_EQUALITY (t);
7563 else if (TYPE_CANONICAL (to_type) != to_type)
7564 TYPE_CANONICAL (t)
7565 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7566 mode, can_alias_all);
7567
7568 layout_type (t);
7569
7570 return t;
7571 }
7572
7573
7574 /* Build the node for the type of references-to-TO_TYPE by default
7575 in ptr_mode. */
7576
7577 tree
7578 build_reference_type (tree to_type)
7579 {
7580 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7581 : TYPE_ADDR_SPACE (to_type);
7582 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7583 return build_reference_type_for_mode (to_type, pointer_mode, false);
7584 }
7585
7586 /* Build a type that is compatible with t but has no cv quals anywhere
7587 in its type, thus
7588
7589 const char *const *const * -> char ***. */
7590
7591 tree
7592 build_type_no_quals (tree t)
7593 {
7594 switch (TREE_CODE (t))
7595 {
7596 case POINTER_TYPE:
7597 return build_pointer_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
7598 TYPE_MODE (t),
7599 TYPE_REF_CAN_ALIAS_ALL (t));
7600 case REFERENCE_TYPE:
7601 return
7602 build_reference_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
7603 TYPE_MODE (t),
7604 TYPE_REF_CAN_ALIAS_ALL (t));
7605 default:
7606 return TYPE_MAIN_VARIANT (t);
7607 }
7608 }
7609
7610 #define MAX_INT_CACHED_PREC \
7611 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7612 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7613
7614 /* Builds a signed or unsigned integer type of precision PRECISION.
7615 Used for C bitfields whose precision does not match that of
7616 built-in target types. */
7617 tree
7618 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7619 int unsignedp)
7620 {
7621 tree itype, ret;
7622
7623 if (unsignedp)
7624 unsignedp = MAX_INT_CACHED_PREC + 1;
7625
7626 if (precision <= MAX_INT_CACHED_PREC)
7627 {
7628 itype = nonstandard_integer_type_cache[precision + unsignedp];
7629 if (itype)
7630 return itype;
7631 }
7632
7633 itype = make_node (INTEGER_TYPE);
7634 TYPE_PRECISION (itype) = precision;
7635
7636 if (unsignedp)
7637 fixup_unsigned_type (itype);
7638 else
7639 fixup_signed_type (itype);
7640
7641 ret = itype;
7642 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7643 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7644 if (precision <= MAX_INT_CACHED_PREC)
7645 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7646
7647 return ret;
7648 }
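
/* For instance, a C bit-field such as "unsigned int x : 24" needs a 24-bit
   member type, which could be obtained roughly as:

     tree t = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are cached, so repeated requests
   return the same node.  */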
7649
7650 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7651 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7652 is true, reuse such a type that has already been constructed. */
7653
7654 static tree
7655 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7656 {
7657 tree itype = make_node (INTEGER_TYPE);
7658 hashval_t hashcode = 0;
7659
7660 TREE_TYPE (itype) = type;
7661
7662 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7663 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7664
7665 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7666 SET_TYPE_MODE (itype, TYPE_MODE (type));
7667 TYPE_SIZE (itype) = TYPE_SIZE (type);
7668 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7669 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7670 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7671
7672 if (!shared)
7673 return itype;
7674
7675 if ((TYPE_MIN_VALUE (itype)
7676 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7677 || (TYPE_MAX_VALUE (itype)
7678 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7679 {
7680 /* Since we cannot reliably merge this type, we need to compare it using
7681 structural equality checks. */
7682 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7683 return itype;
7684 }
7685
7686 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7687 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7688 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7689 itype = type_hash_canon (hashcode, itype);
7690
7691 return itype;
7692 }
7693
7694 /* Wrapper around build_range_type_1 with SHARED set to true. */
7695
7696 tree
7697 build_range_type (tree type, tree lowval, tree highval)
7698 {
7699 return build_range_type_1 (type, lowval, highval, true);
7700 }
7701
7702 /* Wrapper around build_range_type_1 with SHARED set to false. */
7703
7704 tree
7705 build_nonshared_range_type (tree type, tree lowval, tree highval)
7706 {
7707 return build_range_type_1 (type, lowval, highval, false);
7708 }
7709
7710 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7711 MAXVAL should be the maximum value in the domain
7712 (one less than the length of the array).
7713
7714 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7715 We don't enforce this limit, that is up to caller (e.g. language front end).
7716 The limit exists because the result is a signed type and we don't handle
7717 sizes that use more than one HOST_WIDE_INT. */
7718
7719 tree
7720 build_index_type (tree maxval)
7721 {
7722 return build_range_type (sizetype, size_zero_node, maxval);
7723 }
7724
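/* Illustrative sketch: the TYPE_DOMAIN of a ten-element array is the
   index range [0, 9] in sizetype, which a caller would obtain as

     tree domain = build_index_type (size_int (9));  */
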
7725 /* Return true if the debug information for TYPE, a subtype, should be emitted
7726 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7727 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7728 debug info and doesn't reflect the source code. */
7729
7730 bool
7731 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7732 {
7733 tree base_type = TREE_TYPE (type), low, high;
7734
7735 /* Subrange types have a base type which is an integral type. */
7736 if (!INTEGRAL_TYPE_P (base_type))
7737 return false;
7738
7739 /* Get the real bounds of the subtype. */
7740 if (lang_hooks.types.get_subrange_bounds)
7741 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7742 else
7743 {
7744 low = TYPE_MIN_VALUE (type);
7745 high = TYPE_MAX_VALUE (type);
7746 }
7747
7748 /* If the type and its base type have the same representation and the same
7749 name, then the type is not a subrange but a copy of the base type. */
7750 if ((TREE_CODE (base_type) == INTEGER_TYPE
7751 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7752 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7753 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7754 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
7755 {
7756 tree type_name = TYPE_NAME (type);
7757 tree base_type_name = TYPE_NAME (base_type);
7758
7759 if (type_name && TREE_CODE (type_name) == TYPE_DECL)
7760 type_name = DECL_NAME (type_name);
7761
7762 if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
7763 base_type_name = DECL_NAME (base_type_name);
7764
7765 if (type_name == base_type_name)
7766 return false;
7767 }
7768
7769 if (lowval)
7770 *lowval = low;
7771 if (highval)
7772 *highval = high;
7773 return true;
7774 }
7775
7776 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7777 and number of elements specified by the range of values of INDEX_TYPE.
7778 If SHARED is true, reuse such a type that has already been constructed. */
7779
7780 static tree
7781 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7782 {
7783 tree t;
7784
7785 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7786 {
7787 error ("arrays of functions are not meaningful");
7788 elt_type = integer_type_node;
7789 }
7790
7791 t = make_node (ARRAY_TYPE);
7792 TREE_TYPE (t) = elt_type;
7793 TYPE_DOMAIN (t) = index_type;
7794 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7795 layout_type (t);
7796
7797 /* If the element type is incomplete at this point we get marked for
7798 structural equality. Do not record these types in the canonical
7799 type hashtable. */
7800 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7801 return t;
7802
7803 if (shared)
7804 {
7805 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7806 if (index_type)
7807 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7808 t = type_hash_canon (hashcode, t);
7809 }
7810
7811 if (TYPE_CANONICAL (t) == t)
7812 {
7813 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7814 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7815 SET_TYPE_STRUCTURAL_EQUALITY (t);
7816 else if (TYPE_CANONICAL (elt_type) != elt_type
7817 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7818 TYPE_CANONICAL (t)
7819 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7820 index_type
7821 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7822 shared);
7823 }
7824
7825 return t;
7826 }
7827
7828 /* Wrapper around build_array_type_1 with SHARED set to true. */
7829
7830 tree
7831 build_array_type (tree elt_type, tree index_type)
7832 {
7833 return build_array_type_1 (elt_type, index_type, true);
7834 }
7835
7836 /* Wrapper around build_array_type_1 with SHARED set to false. */
7837
7838 tree
7839 build_nonshared_array_type (tree elt_type, tree index_type)
7840 {
7841 return build_array_type_1 (elt_type, index_type, false);
7842 }
7843
7844 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7845 sizetype. */
7846
7847 tree
7848 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7849 {
7850 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7851 }
7852
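/* Illustrative sketch: the type `int[10]' can be built either from an
   explicit domain or directly from the element count,

     tree a1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree a2 = build_array_type_nelts (integer_type_node, 10);

   and both calls hash to the same shared ARRAY_TYPE node.  */
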
7853 /* Recursively examines the array elements of TYPE, until a non-array
7854 element type is found. */
7855
7856 tree
7857 strip_array_types (tree type)
7858 {
7859 while (TREE_CODE (type) == ARRAY_TYPE)
7860 type = TREE_TYPE (type);
7861
7862 return type;
7863 }
7864
7865 /* Computes the canonical argument types from the argument type list
7866 ARGTYPES.
7867
7868 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7869 on entry to this function, or if any of the ARGTYPES are
7870 structural.
7871
7872 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7873 true on entry to this function, or if any of the ARGTYPES are
7874 non-canonical.
7875
7876 Returns a canonical argument list, which may be ARGTYPES when the
7877 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7878 true) or would not differ from ARGTYPES. */
7879
7880 static tree
7881 maybe_canonicalize_argtypes (tree argtypes,
7882 bool *any_structural_p,
7883 bool *any_noncanonical_p)
7884 {
7885 tree arg;
7886 bool any_noncanonical_argtypes_p = false;
7887
7888 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7889 {
7890 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7891 /* Fail gracefully by stating that the type is structural. */
7892 *any_structural_p = true;
7893 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7894 *any_structural_p = true;
7895 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7896 || TREE_PURPOSE (arg))
7897 /* If the argument has a default argument, we consider it
7898 non-canonical even though the type itself is canonical.
7899 That way, different variants of function and method types
7900 with default arguments will all point to the variant with
7901 no defaults as their canonical type. */
7902 any_noncanonical_argtypes_p = true;
7903 }
7904
7905 if (*any_structural_p)
7906 return argtypes;
7907
7908 if (any_noncanonical_argtypes_p)
7909 {
7910 /* Build the canonical list of argument types. */
7911 tree canon_argtypes = NULL_TREE;
7912 bool is_void = false;
7913
7914 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7915 {
7916 if (arg == void_list_node)
7917 is_void = true;
7918 else
7919 canon_argtypes = tree_cons (NULL_TREE,
7920 TYPE_CANONICAL (TREE_VALUE (arg)),
7921 canon_argtypes);
7922 }
7923
7924 canon_argtypes = nreverse (canon_argtypes);
7925 if (is_void)
7926 canon_argtypes = chainon (canon_argtypes, void_list_node);
7927
7928 /* There is a non-canonical type. */
7929 *any_noncanonical_p = true;
7930 return canon_argtypes;
7931 }
7932
7933 /* The canonical argument types are the same as ARGTYPES. */
7934 return argtypes;
7935 }
7936
7937 /* Construct, lay out and return
7938 the type of functions returning type VALUE_TYPE
7939 given arguments of types ARG_TYPES.
7940 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7941 are data type nodes for the arguments of the function.
7942 If such a type has already been constructed, reuse it. */
7943
7944 tree
7945 build_function_type (tree value_type, tree arg_types)
7946 {
7947 tree t;
7948 hashval_t hashcode = 0;
7949 bool any_structural_p, any_noncanonical_p;
7950 tree canon_argtypes;
7951
7952 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7953 {
7954 error ("function return type cannot be function");
7955 value_type = integer_type_node;
7956 }
7957
7958 /* Make a node of the sort we want. */
7959 t = make_node (FUNCTION_TYPE);
7960 TREE_TYPE (t) = value_type;
7961 TYPE_ARG_TYPES (t) = arg_types;
7962
7963 /* If we already have such a type, use the old one. */
7964 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
7965 hashcode = type_hash_list (arg_types, hashcode);
7966 t = type_hash_canon (hashcode, t);
7967
7968 /* Set up the canonical type. */
7969 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7970 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7971 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7972 &any_structural_p,
7973 &any_noncanonical_p);
7974 if (any_structural_p)
7975 SET_TYPE_STRUCTURAL_EQUALITY (t);
7976 else if (any_noncanonical_p)
7977 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7978 canon_argtypes);
7979
7980 if (!COMPLETE_TYPE_P (t))
7981 layout_type (t);
7982 return t;
7983 }
7984
7985 /* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
7986 return value if SKIP_RETURN is true. */
7987
7988 static tree
7989 build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
7990 bool skip_return)
7991 {
7992 tree new_type = NULL;
7993 tree args, new_args = NULL, t;
7994 tree new_reversed;
7995 int i = 0;
7996
7997 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
7998 args = TREE_CHAIN (args), i++)
7999 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
8000 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
8001
8002 new_reversed = nreverse (new_args);
8003 if (args)
8004 {
8005 if (new_reversed)
8006 TREE_CHAIN (new_args) = void_list_node;
8007 else
8008 new_reversed = void_list_node;
8009 }
8010
8011 /* Use copy_node to preserve as much as possible from the original type
8012 (debug info, attribute lists etc.).
8013 The exception is METHOD_TYPEs, which must have a THIS argument;
8014 when we are asked to remove it, we need to build a new FUNCTION_TYPE
8015 instead. */
8016 if (TREE_CODE (orig_type) != METHOD_TYPE
8017 || !args_to_skip
8018 || !bitmap_bit_p (args_to_skip, 0))
8019 {
8020 new_type = build_distinct_type_copy (orig_type);
8021 TYPE_ARG_TYPES (new_type) = new_reversed;
8022 }
8023 else
8024 {
8025 new_type
8026 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
8027 new_reversed));
8028 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
8029 }
8030
8031 if (skip_return)
8032 TREE_TYPE (new_type) = void_type_node;
8033
8034 /* This is a new type, not a copy of an old type. Need to reassociate
8035 variants. We can handle everything except the main variant lazily. */
8036 t = TYPE_MAIN_VARIANT (orig_type);
8037 if (t != orig_type)
8038 {
8039 t = build_function_type_skip_args (t, args_to_skip, skip_return);
8040 TYPE_MAIN_VARIANT (new_type) = t;
8041 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
8042 TYPE_NEXT_VARIANT (t) = new_type;
8043 }
8044 else
8045 {
8046 TYPE_MAIN_VARIANT (new_type) = new_type;
8047 TYPE_NEXT_VARIANT (new_type) = NULL;
8048 }
8049
8050 return new_type;
8051 }
8052
8053 /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
8054 return value if SKIP_RETURN is true.
8055
8056 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
8057 linked by TREE_CHAIN directly. The caller is responsible for eliminating
8058 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
8059
8060 tree
8061 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
8062 bool skip_return)
8063 {
8064 tree new_decl = copy_node (orig_decl);
8065 tree new_type;
8066
8067 new_type = TREE_TYPE (orig_decl);
8068 if (prototype_p (new_type)
8069 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
8070 new_type
8071 = build_function_type_skip_args (new_type, args_to_skip, skip_return);
8072 TREE_TYPE (new_decl) = new_type;
8073
8074 /* For declarations setting DECL_VINDEX (i.e. methods)
8075 we expect the first argument to be the THIS pointer. */
8076 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
8077 DECL_VINDEX (new_decl) = NULL_TREE;
8078
8079 /* When signature changes, we need to clear builtin info. */
8080 if (DECL_BUILT_IN (new_decl)
8081 && args_to_skip
8082 && !bitmap_empty_p (args_to_skip))
8083 {
8084 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
8085 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
8086 }
8087 return new_decl;
8088 }
8089
8090 /* Build a function type. The RETURN_TYPE is the type returned by the
8091 function. If VAARGS is set, no void_type_node is appended to the
8092 list. ARGP must always be terminated by a NULL_TREE. */
8093
8094 static tree
8095 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8096 {
8097 tree t, args, last;
8098
8099 t = va_arg (argp, tree);
8100 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8101 args = tree_cons (NULL_TREE, t, args);
8102
8103 if (vaargs)
8104 {
8105 last = args;
8106 if (args != NULL_TREE)
8107 args = nreverse (args);
8108 gcc_assert (last != void_list_node);
8109 }
8110 else if (args == NULL_TREE)
8111 args = void_list_node;
8112 else
8113 {
8114 last = args;
8115 args = nreverse (args);
8116 TREE_CHAIN (last) = void_list_node;
8117 }
8118 args = build_function_type (return_type, args);
8119
8120 return args;
8121 }
8122
8123 /* Build a function type. The RETURN_TYPE is the type returned by the
8124 function. If additional arguments are provided, they are
8125 additional argument types. The list of argument types must always
8126 be terminated by NULL_TREE. */
8127
8128 tree
8129 build_function_type_list (tree return_type, ...)
8130 {
8131 tree args;
8132 va_list p;
8133
8134 va_start (p, return_type);
8135 args = build_function_type_list_1 (false, return_type, p);
8136 va_end (p);
8137 return args;
8138 }
8139
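/* Illustrative sketch: the prototype `int f (int, double)' corresponds
   to the FUNCTION_TYPE built by

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node,
                                             double_type_node,
                                             NULL_TREE);  */
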
8140 /* Build a variable argument function type. The RETURN_TYPE is the
8141 type returned by the function. If additional arguments are provided,
8142 they are additional argument types. The list of argument types must
8143 always be terminated by NULL_TREE. */
8144
8145 tree
8146 build_varargs_function_type_list (tree return_type, ...)
8147 {
8148 tree args;
8149 va_list p;
8150
8151 va_start (p, return_type);
8152 args = build_function_type_list_1 (true, return_type, p);
8153 va_end (p);
8154
8155 return args;
8156 }
8157
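/* Illustrative sketch: a printf-like prototype `int f (const char *, ...)'
   corresponds to

     tree cstr = build_pointer_type
       (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
     tree fntype = build_varargs_function_type_list (integer_type_node,
                                                     cstr, NULL_TREE);

   The missing void_list_node terminator is what marks the type as
   taking variable arguments.  */
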
8158 /* Build a function type. RETURN_TYPE is the type returned by the
8159 function; VAARGS indicates whether the function takes varargs. The
8160 function takes N named arguments, the types of which are provided in
8161 ARG_TYPES. */
8162
8163 static tree
8164 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8165 tree *arg_types)
8166 {
8167 int i;
8168 tree t = vaargs ? NULL_TREE : void_list_node;
8169
8170 for (i = n - 1; i >= 0; i--)
8171 t = tree_cons (NULL_TREE, arg_types[i], t);
8172
8173 return build_function_type (return_type, t);
8174 }
8175
8176 /* Build a function type. RETURN_TYPE is the type returned by the
8177 function. The function takes N named arguments, the types of which
8178 are provided in ARG_TYPES. */
8179
8180 tree
8181 build_function_type_array (tree return_type, int n, tree *arg_types)
8182 {
8183 return build_function_type_array_1 (false, return_type, n, arg_types);
8184 }
8185
8186 /* Build a variable argument function type. RETURN_TYPE is the type
8187 returned by the function. The function takes N named arguments, the
8188 types of which are provided in ARG_TYPES. */
8189
8190 tree
8191 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8192 {
8193 return build_function_type_array_1 (true, return_type, n, arg_types);
8194 }
8195
8196 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8197 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8198 for the method. An implicit additional parameter (of type
8199 pointer-to-BASETYPE) is added to the ARGTYPES. */
8200
8201 tree
8202 build_method_type_directly (tree basetype,
8203 tree rettype,
8204 tree argtypes)
8205 {
8206 tree t;
8207 tree ptype;
8208 int hashcode = 0;
8209 bool any_structural_p, any_noncanonical_p;
8210 tree canon_argtypes;
8211
8212 /* Make a node of the sort we want. */
8213 t = make_node (METHOD_TYPE);
8214
8215 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8216 TREE_TYPE (t) = rettype;
8217 ptype = build_pointer_type (basetype);
8218
8219 /* The actual arglist for this function includes a "hidden" argument
8220 which is "this". Put it into the list of argument types. */
8221 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8222 TYPE_ARG_TYPES (t) = argtypes;
8223
8224 /* If we already have such a type, use the old one. */
8225 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8226 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8227 hashcode = type_hash_list (argtypes, hashcode);
8228 t = type_hash_canon (hashcode, t);
8229
8230 /* Set up the canonical type. */
8231 any_structural_p
8232 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8233 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8234 any_noncanonical_p
8235 = (TYPE_CANONICAL (basetype) != basetype
8236 || TYPE_CANONICAL (rettype) != rettype);
8237 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8238 &any_structural_p,
8239 &any_noncanonical_p);
8240 if (any_structural_p)
8241 SET_TYPE_STRUCTURAL_EQUALITY (t);
8242 else if (any_noncanonical_p)
8243 TYPE_CANONICAL (t)
8244 = build_method_type_directly (TYPE_CANONICAL (basetype),
8245 TYPE_CANONICAL (rettype),
8246 canon_argtypes);
8247 if (!COMPLETE_TYPE_P (t))
8248 layout_type (t);
8249
8250 return t;
8251 }
8252
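/* Illustrative sketch (class_type stands for some RECORD_TYPE already
   built by the front end): the method type of `int C::f (int)' could be
   built as

     tree argtypes = tree_cons (NULL_TREE, integer_type_node,
                                void_list_node);
     tree mtype = build_method_type_directly (class_type,
                                              integer_type_node,
                                              argtypes);

   ARGTYPES lists only the explicit parameters; the hidden `this'
   pointer is prepended above.  */
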
8253 /* Construct, lay out and return the type of methods belonging to class
8254 BASETYPE and whose arguments and values are described by TYPE.
8255 If that type exists already, reuse it.
8256 TYPE must be a FUNCTION_TYPE node. */
8257
8258 tree
8259 build_method_type (tree basetype, tree type)
8260 {
8261 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8262
8263 return build_method_type_directly (basetype,
8264 TREE_TYPE (type),
8265 TYPE_ARG_TYPES (type));
8266 }
8267
8268 /* Construct, lay out and return the type of offsets to a value
8269 of type TYPE, within an object of type BASETYPE.
8270 If a suitable offset type exists already, reuse it. */
8271
8272 tree
8273 build_offset_type (tree basetype, tree type)
8274 {
8275 tree t;
8276 hashval_t hashcode = 0;
8277
8278 /* Make a node of the sort we want. */
8279 t = make_node (OFFSET_TYPE);
8280
8281 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8282 TREE_TYPE (t) = type;
8283
8284 /* If we already have such a type, use the old one. */
8285 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8286 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8287 t = type_hash_canon (hashcode, t);
8288
8289 if (!COMPLETE_TYPE_P (t))
8290 layout_type (t);
8291
8292 if (TYPE_CANONICAL (t) == t)
8293 {
8294 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8295 || TYPE_STRUCTURAL_EQUALITY_P (type))
8296 SET_TYPE_STRUCTURAL_EQUALITY (t);
8297 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8298 || TYPE_CANONICAL (type) != type)
8299 TYPE_CANONICAL (t)
8300 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8301 TYPE_CANONICAL (type));
8302 }
8303
8304 return t;
8305 }
8306
8307 /* Create a complex type whose components are COMPONENT_TYPE. */
8308
8309 tree
8310 build_complex_type (tree component_type)
8311 {
8312 tree t;
8313 hashval_t hashcode;
8314
8315 gcc_assert (INTEGRAL_TYPE_P (component_type)
8316 || SCALAR_FLOAT_TYPE_P (component_type)
8317 || FIXED_POINT_TYPE_P (component_type));
8318
8319 /* Make a node of the sort we want. */
8320 t = make_node (COMPLEX_TYPE);
8321
8322 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8323
8324 /* If we already have such a type, use the old one. */
8325 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8326 t = type_hash_canon (hashcode, t);
8327
8328 if (!COMPLETE_TYPE_P (t))
8329 layout_type (t);
8330
8331 if (TYPE_CANONICAL (t) == t)
8332 {
8333 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8334 SET_TYPE_STRUCTURAL_EQUALITY (t);
8335 else if (TYPE_CANONICAL (component_type) != component_type)
8336 TYPE_CANONICAL (t)
8337 = build_complex_type (TYPE_CANONICAL (component_type));
8338 }
8339
8340 /* We need to create a name, since complex is a fundamental type. */
8341 if (! TYPE_NAME (t))
8342 {
8343 const char *name;
8344 if (component_type == char_type_node)
8345 name = "complex char";
8346 else if (component_type == signed_char_type_node)
8347 name = "complex signed char";
8348 else if (component_type == unsigned_char_type_node)
8349 name = "complex unsigned char";
8350 else if (component_type == short_integer_type_node)
8351 name = "complex short int";
8352 else if (component_type == short_unsigned_type_node)
8353 name = "complex short unsigned int";
8354 else if (component_type == integer_type_node)
8355 name = "complex int";
8356 else if (component_type == unsigned_type_node)
8357 name = "complex unsigned int";
8358 else if (component_type == long_integer_type_node)
8359 name = "complex long int";
8360 else if (component_type == long_unsigned_type_node)
8361 name = "complex long unsigned int";
8362 else if (component_type == long_long_integer_type_node)
8363 name = "complex long long int";
8364 else if (component_type == long_long_unsigned_type_node)
8365 name = "complex long long unsigned int";
8366 else
8367 name = 0;
8368
8369 if (name != 0)
8370 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8371 get_identifier (name), t);
8372 }
8373
8374 return build_qualified_type (t, TYPE_QUALS (component_type));
8375 }
8376
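/* Illustrative sketch: the C type `_Complex double' corresponds to

     tree cdouble = build_complex_type (double_type_node);

   which, once the common nodes are initialized, hashes to the shared
   complex_double_type_node.  */
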
8377 /* If TYPE is a real or complex floating-point type and the target
8378 does not directly support arithmetic on TYPE then return the wider
8379 type to be used for arithmetic on TYPE. Otherwise, return
8380 NULL_TREE. */
8381
8382 tree
8383 excess_precision_type (tree type)
8384 {
8385 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8386 {
8387 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8388 switch (TREE_CODE (type))
8389 {
8390 case REAL_TYPE:
8391 switch (flt_eval_method)
8392 {
8393 case 1:
8394 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8395 return double_type_node;
8396 break;
8397 case 2:
8398 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8399 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8400 return long_double_type_node;
8401 break;
8402 default:
8403 gcc_unreachable ();
8404 }
8405 break;
8406 case COMPLEX_TYPE:
8407 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8408 return NULL_TREE;
8409 switch (flt_eval_method)
8410 {
8411 case 1:
8412 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8413 return complex_double_type_node;
8414 break;
8415 case 2:
8416 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8417 || (TYPE_MODE (TREE_TYPE (type))
8418 == TYPE_MODE (double_type_node)))
8419 return complex_long_double_type_node;
8420 break;
8421 default:
8422 gcc_unreachable ();
8423 }
8424 break;
8425 default:
8426 break;
8427 }
8428 }
8429 return NULL_TREE;
8430 }
8431 \f
8432 /* Return OP, stripped of any conversions to wider types as much as is safe.
8433 Converting the value back to OP's type makes a value equivalent to OP.
8434
8435 If FOR_TYPE is nonzero, we return a value which, if converted to
8436 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8437
8438 OP must have integer, real or enumeral type. Pointers are not allowed!
8439
8440 There are some cases where the obvious value we could return
8441 would regenerate to OP if converted to OP's type,
8442 but would not extend like OP to wider types.
8443 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8444 For example, if OP is (unsigned short)(signed char)-1,
8445 we avoid returning (signed char)-1 if FOR_TYPE is int,
8446 even though extending that to an unsigned short would regenerate OP,
8447 since the result of extending (signed char)-1 to (int)
8448 is different from (int) OP. */
8449
8450 tree
8451 get_unwidened (tree op, tree for_type)
8452 {
8453 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8454 tree type = TREE_TYPE (op);
8455 unsigned final_prec
8456 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8457 int uns
8458 = (for_type != 0 && for_type != type
8459 && final_prec > TYPE_PRECISION (type)
8460 && TYPE_UNSIGNED (type));
8461 tree win = op;
8462
8463 while (CONVERT_EXPR_P (op))
8464 {
8465 int bitschange;
8466
8467 /* TYPE_PRECISION on vector types has different meaning
8468 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8469 so avoid them here. */
8470 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8471 break;
8472
8473 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8474 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8475
8476 /* Truncations are many-one so cannot be removed.
8477 Unless we are later going to truncate down even farther. */
8478 if (bitschange < 0
8479 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8480 break;
8481
8482 /* See what's inside this conversion. If we decide to strip it,
8483 we will set WIN. */
8484 op = TREE_OPERAND (op, 0);
8485
8486 /* If we have not stripped any zero-extensions (uns is 0),
8487 we can strip any kind of extension.
8488 If we have previously stripped a zero-extension,
8489 only zero-extensions can safely be stripped.
8490 Any extension can be stripped if the bits it would produce
8491 are all going to be discarded later by truncating to FOR_TYPE. */
8492
8493 if (bitschange > 0)
8494 {
8495 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8496 win = op;
8497 /* TYPE_UNSIGNED says whether this is a zero-extension.
8498 Let's avoid computing it if it does not affect WIN
8499 and if UNS will not be needed again. */
8500 if ((uns
8501 || CONVERT_EXPR_P (op))
8502 && TYPE_UNSIGNED (TREE_TYPE (op)))
8503 {
8504 uns = 1;
8505 win = op;
8506 }
8507 }
8508 }
8509
8510 /* If we finally reach a constant see if it fits in for_type and
8511 in that case convert it. */
8512 if (for_type
8513 && TREE_CODE (win) == INTEGER_CST
8514 && TREE_TYPE (win) != for_type
8515 && int_fits_type_p (win, for_type))
8516 win = fold_convert (for_type, win);
8517
8518 return win;
8519 }
8520 \f
8521 /* Return OP or a simpler expression for a narrower value
8522 which can be sign-extended or zero-extended to give back OP.
8523 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8524 or 0 if the value should be sign-extended. */
8525
8526 tree
8527 get_narrower (tree op, int *unsignedp_ptr)
8528 {
8529 int uns = 0;
8530 int first = 1;
8531 tree win = op;
8532 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8533
8534 while (TREE_CODE (op) == NOP_EXPR)
8535 {
8536 int bitschange
8537 = (TYPE_PRECISION (TREE_TYPE (op))
8538 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8539
8540 /* Truncations are many-one so cannot be removed. */
8541 if (bitschange < 0)
8542 break;
8543
8544 /* See what's inside this conversion. If we decide to strip it,
8545 we will set WIN. */
8546
8547 if (bitschange > 0)
8548 {
8549 op = TREE_OPERAND (op, 0);
8550 /* An extension: the outermost one can be stripped,
8551 but remember whether it is zero or sign extension. */
8552 if (first)
8553 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8554 /* Otherwise, if a sign extension has been stripped,
8555 only sign extensions can now be stripped;
8556 if a zero extension has been stripped, only zero-extensions. */
8557 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8558 break;
8559 first = 0;
8560 }
8561 else /* bitschange == 0 */
8562 {
8563 /* A change in nominal type can always be stripped, but we must
8564 preserve the unsignedness. */
8565 if (first)
8566 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8567 first = 0;
8568 op = TREE_OPERAND (op, 0);
8569 /* Keep trying to narrow, but don't assign op to win if it
8570 would turn an integral type into something else. */
8571 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8572 continue;
8573 }
8574
8575 win = op;
8576 }
8577
8578 if (TREE_CODE (op) == COMPONENT_REF
8579 /* Since type_for_size always gives an integer type. */
8580 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8581 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8582 /* Ensure field is laid out already. */
8583 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8584 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8585 {
8586 unsigned HOST_WIDE_INT innerprec
8587 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8588 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8589 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8590 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8591
8592 /* We can get this structure field in a narrower type that fits it,
8593 but the resulting extension to its nominal type (a fullword type)
8594 must satisfy the same conditions as for other extensions.
8595
8596 Do this only for fields that are aligned (not bit-fields),
8597 because when bit-field insns are used there is no
8598 advantage in doing this. */
8599
8600 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8601 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8602 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8603 && type != 0)
8604 {
8605 if (first)
8606 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8607 win = fold_convert (type, op);
8608 }
8609 }
8610
8611 *unsignedp_ptr = uns;
8612 return win;
8613 }
8614 \f
8615 /* Returns true if integer constant C has a value that is permissible
8616 for type TYPE (an INTEGER_TYPE). */
8617
8618 bool
8619 int_fits_type_p (const_tree c, const_tree type)
8620 {
8621 tree type_low_bound, type_high_bound;
8622 bool ok_for_low_bound, ok_for_high_bound;
8623 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8624
8625 retry:
8626 type_low_bound = TYPE_MIN_VALUE (type);
8627 type_high_bound = TYPE_MAX_VALUE (type);
8628
8629 /* If at least one bound of the type is a constant integer, we can check
8630 ourselves and maybe make a decision. If no such decision is possible, but
8631 this type is a subtype, try checking against that. Otherwise, use
8632 fits_to_tree_p, which checks against the precision.
8633
8634 Compute the status for each possibly constant bound, and return false
8635 as soon as we see that the constant does not satisfy one of them. Use
8636 ok_for_low_bound and ok_for_high_bound to record whether the constant
8637 is known to fit the corresponding constant bound. */
8638
8639 /* Check if c >= type_low_bound. */
8640 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8641 {
8642 if (INT_CST_LT (c, type_low_bound))
8643 return false;
8644 ok_for_low_bound = true;
8645 }
8646 else
8647 ok_for_low_bound = false;
8648
8649 /* Check if c <= type_high_bound. */
8650 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8651 {
8652 if (INT_CST_LT (type_high_bound, c))
8653 return false;
8654 ok_for_high_bound = true;
8655 }
8656 else
8657 ok_for_high_bound = false;
8658
8659 /* If the constant fits both bounds, the result is known. */
8660 if (ok_for_low_bound && ok_for_high_bound)
8661 return true;
8662
8663 /* Perform some generic filtering which may allow making a decision
8664 even if the bounds are not constant. First, negative integers
8665 never fit in unsigned types. */
8666 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8667 return false;
8668
8669 /* Second, narrower types always fit in wider ones. */
8670 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8671 return true;
8672
8673 /* Third, unsigned integers with top bit set never fit signed types. */
8674 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED && wi::neg_p (c))
8675 return false;
8676
8677 /* If we haven't been able to decide at this point, there is nothing more we
8678 can check ourselves here. Look at the base type if we have one and it
8679 has the same precision. */
8680 if (TREE_CODE (type) == INTEGER_TYPE
8681 && TREE_TYPE (type) != 0
8682 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8683 {
8684 type = TREE_TYPE (type);
8685 goto retry;
8686 }
8687
8688 /* Or to fits_to_tree_p, if nothing else. */
8689 return wi::fits_to_tree_p (c, type);
8690 }
8691
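/* Illustrative sketch (assuming the usual 8-bit unsigned char):

     tree c = build_int_cst (integer_type_node, 300);
     int_fits_type_p (c, unsigned_char_type_node);   => false
     int_fits_type_p (c, integer_type_node);         => true  */
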
8692 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8693 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8694 represented (assuming two's-complement arithmetic) within the bit
8695 precision of the type are returned instead. */
8696
8697 void
8698 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8699 {
8700 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8701 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8702 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8703 else
8704 {
8705 if (TYPE_UNSIGNED (type))
8706 mpz_set_ui (min, 0);
8707 else
8708 {
8709 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8710 wi::to_mpz (mn, min, SIGNED);
8711 }
8712 }
8713
8714 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8715 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8716 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8717 else
8718 {
8719 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8720 wi::to_mpz (mn, max, TYPE_SIGN (type));
8721 }
8722 }
8723
8724 /* Return true if VAR is an automatic variable defined in function FN. */
8725
8726 bool
8727 auto_var_in_fn_p (const_tree var, const_tree fn)
8728 {
8729 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8730 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8731 || TREE_CODE (var) == PARM_DECL)
8732 && ! TREE_STATIC (var))
8733 || TREE_CODE (var) == LABEL_DECL
8734 || TREE_CODE (var) == RESULT_DECL));
8735 }
8736
8737 /* Subprogram of following function. Called by walk_tree.
8738
8739 Return *TP if it is an automatic variable or parameter of the
8740 function passed in as DATA. */
8741
8742 static tree
8743 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8744 {
8745 tree fn = (tree) data;
8746
8747 if (TYPE_P (*tp))
8748 *walk_subtrees = 0;
8749
8750 else if (DECL_P (*tp)
8751 && auto_var_in_fn_p (*tp, fn))
8752 return *tp;
8753
8754 return NULL_TREE;
8755 }
8756
8757 /* Returns true if T is, contains, or refers to a type with variable
8758 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8759 arguments, but not the return type. If FN is nonzero, only return
8760 true if a modifier of the type or position of FN is a variable or
8761 parameter inside FN.
8762
8763 This concept is more general than that of C99 'variably modified types':
8764 in C99, a struct type is never variably modified because a VLA may not
8765 appear as a structure member. However, in GNU C, code like:
8766
8767 struct S { int i[f()]; };
8768
8769 is valid, and other languages may define similar constructs. */
8770
8771 bool
8772 variably_modified_type_p (tree type, tree fn)
8773 {
8774 tree t;
8775
8776 /* Test if T is either variable (if FN is zero) or an expression containing
8777 a variable in FN. If TYPE isn't gimplified, return true also if
8778 gimplify_one_sizepos would gimplify the expression into a local
8779 variable. */
8780 #define RETURN_TRUE_IF_VAR(T) \
8781 do { tree _t = (T); \
8782 if (_t != NULL_TREE \
8783 && _t != error_mark_node \
8784 && TREE_CODE (_t) != INTEGER_CST \
8785 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8786 && (!fn \
8787 || (!TYPE_SIZES_GIMPLIFIED (type) \
8788 && !is_gimple_sizepos (_t)) \
8789 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8790 return true; } while (0)
8791
8792 if (type == error_mark_node)
8793 return false;
8794
8795 /* If TYPE itself has variable size, it is variably modified. */
8796 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8797 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8798
8799 switch (TREE_CODE (type))
8800 {
8801 case POINTER_TYPE:
8802 case REFERENCE_TYPE:
8803 case VECTOR_TYPE:
8804 if (variably_modified_type_p (TREE_TYPE (type), fn))
8805 return true;
8806 break;
8807
8808 case FUNCTION_TYPE:
8809 case METHOD_TYPE:
8810 /* If TYPE is a function type, it is variably modified if the
8811 return type is variably modified. */
8812 if (variably_modified_type_p (TREE_TYPE (type), fn))
8813 return true;
8814 break;
8815
8816 case INTEGER_TYPE:
8817 case REAL_TYPE:
8818 case FIXED_POINT_TYPE:
8819 case ENUMERAL_TYPE:
8820 case BOOLEAN_TYPE:
8821 /* Scalar types are variably modified if their end points
8822 aren't constant. */
8823 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8824 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8825 break;
8826
8827 case RECORD_TYPE:
8828 case UNION_TYPE:
8829 case QUAL_UNION_TYPE:
8830 /* We can't see if any of the fields are variably-modified by the
8831 definition we normally use, since that would produce infinite
8832 recursion via pointers. */
8833 /* This is variably modified if some field's type is. */
8834 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8835 if (TREE_CODE (t) == FIELD_DECL)
8836 {
8837 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8838 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8839 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8840
8841 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8842 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8843 }
8844 break;
8845
8846 case ARRAY_TYPE:
8847 /* Do not call ourselves to avoid infinite recursion. This is
8848 variably modified if the element type is. */
8849 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8850 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8851 break;
8852
8853 default:
8854 break;
8855 }
8856
8857 /* The current language may have other cases to check, but in general,
8858 all other types are not variably modified. */
8859 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8860
8861 #undef RETURN_TRUE_IF_VAR
8862 }
8863
8864 /* Given a DECL or TYPE, return the scope in which it was declared, or
8865 NULL_TREE if there is no containing scope. */
8866
8867 tree
8868 get_containing_scope (const_tree t)
8869 {
8870 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8871 }
8872
8873 /* Return the innermost context enclosing DECL that is
8874 a FUNCTION_DECL, or zero if none. */
8875
8876 tree
8877 decl_function_context (const_tree decl)
8878 {
8879 tree context;
8880
8881 if (TREE_CODE (decl) == ERROR_MARK)
8882 return 0;
8883
8884 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8885 where we look up the function at runtime. Such functions always take
8886 a first argument of type 'pointer to real context'.
8887
8888 C++ should really be fixed to use DECL_CONTEXT for the real context,
8889 and use something else for the "virtual context". */
8890 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8891 context
8892 = TYPE_MAIN_VARIANT
8893 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8894 else
8895 context = DECL_CONTEXT (decl);
8896
8897 while (context && TREE_CODE (context) != FUNCTION_DECL)
8898 {
8899 if (TREE_CODE (context) == BLOCK)
8900 context = BLOCK_SUPERCONTEXT (context);
8901 else
8902 context = get_containing_scope (context);
8903 }
8904
8905 return context;
8906 }
8907
8908 /* Return the innermost context enclosing DECL that is
8909 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8910 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8911
8912 tree
8913 decl_type_context (const_tree decl)
8914 {
8915 tree context = DECL_CONTEXT (decl);
8916
8917 while (context)
8918 switch (TREE_CODE (context))
8919 {
8920 case NAMESPACE_DECL:
8921 case TRANSLATION_UNIT_DECL:
8922 return NULL_TREE;
8923
8924 case RECORD_TYPE:
8925 case UNION_TYPE:
8926 case QUAL_UNION_TYPE:
8927 return context;
8928
8929 case TYPE_DECL:
8930 case FUNCTION_DECL:
8931 context = DECL_CONTEXT (context);
8932 break;
8933
8934 case BLOCK:
8935 context = BLOCK_SUPERCONTEXT (context);
8936 break;
8937
8938 default:
8939 gcc_unreachable ();
8940 }
8941
8942 return NULL_TREE;
8943 }
8944
8945 /* CALL is a CALL_EXPR. Return the declaration for the function
8946 called, or NULL_TREE if the called function cannot be
8947 determined. */
8948
8949 tree
8950 get_callee_fndecl (const_tree call)
8951 {
8952 tree addr;
8953
8954 if (call == error_mark_node)
8955 return error_mark_node;
8956
8957 /* It's invalid to call this function with anything but a
8958 CALL_EXPR. */
8959 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8960
8961 /* The first operand to the CALL is the address of the function
8962 called. */
8963 addr = CALL_EXPR_FN (call);
8964
8965 STRIP_NOPS (addr);
8966
8967 /* If this is a readonly function pointer, extract its initial value. */
8968 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8969 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8970 && DECL_INITIAL (addr))
8971 addr = DECL_INITIAL (addr);
8972
8973 /* If the address is just `&f' for some function `f', then we know
8974 that `f' is being called. */
8975 if (TREE_CODE (addr) == ADDR_EXPR
8976 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8977 return TREE_OPERAND (addr, 0);
8978
8979 /* We couldn't figure out what was being called. */
8980 return NULL_TREE;
8981 }
8982
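/* Illustrative sketch of the usual caller idiom, here testing for a
   known built-in (call_expr names whatever CALL_EXPR is in hand):

     tree fndecl = get_callee_fndecl (call_expr);
     if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
       ...

   Calls through an arbitrary function pointer simply yield NULL_TREE.  */
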
8983 /* Print debugging information about tree nodes generated during the compile,
8984 and any language-specific information. */
8985
8986 void
8987 dump_tree_statistics (void)
8988 {
8989 if (GATHER_STATISTICS)
8990 {
8991 int i;
8992 int total_nodes, total_bytes;
8993 fprintf (stderr, "Kind Nodes Bytes\n");
8994 fprintf (stderr, "---------------------------------------\n");
8995 total_nodes = total_bytes = 0;
8996 for (i = 0; i < (int) all_kinds; i++)
8997 {
8998 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8999 tree_node_counts[i], tree_node_sizes[i]);
9000 total_nodes += tree_node_counts[i];
9001 total_bytes += tree_node_sizes[i];
9002 }
9003 fprintf (stderr, "---------------------------------------\n");
9004 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9005 fprintf (stderr, "---------------------------------------\n");
9006 fprintf (stderr, "Code Nodes\n");
9007 fprintf (stderr, "----------------------------\n");
9008 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9009 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9010 tree_code_counts[i]);
9011 fprintf (stderr, "----------------------------\n");
9012 ssanames_print_statistics ();
9013 phinodes_print_statistics ();
9014 }
9015 else
9016 fprintf (stderr, "(No per-node statistics)\n");
9017
9018 print_type_hash_statistics ();
9019 print_debug_expr_statistics ();
9020 print_value_expr_statistics ();
9021 lang_hooks.print_statistics ();
9022 }
9023 \f
9024 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9025
9026 /* Generate a crc32 of the low BITS bits of VALUE. */
9027
9028 static unsigned
9029 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9030 {
9031 unsigned ix;
9032
9033 for (ix = bits; ix--; value <<= 1)
9034 {
9035 unsigned feedback;
9036
9037 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9038 chksum <<= 1;
9039 chksum ^= feedback;
9040 }
9041 return chksum;
9042 }
9043
9044 /* Generate a crc32 of a 32-bit unsigned. */
9045
9046 unsigned
9047 crc32_unsigned (unsigned chksum, unsigned value)
9048 {
9049 return crc32_unsigned_bits (chksum, value, 32);
9050 }
9051
9052 /* Generate a crc32 of a byte. */
9053
9054 unsigned
9055 crc32_byte (unsigned chksum, char byte)
9056 {
9057 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9058 }
9059
9060 /* Generate a crc32 of a string. */
9061
9062 unsigned
9063 crc32_string (unsigned chksum, const char *string)
9064 {
9065 do
9066 {
9067 chksum = crc32_byte (chksum, *string);
9068 }
9069 while (*string++);
9070 return chksum;
9071 }
9072
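/* Illustrative sketch: chaining the whole-string hash used by
   get_file_function_name below,

     unsigned chksum = crc32_string (0, "first");
     chksum = crc32_string (chksum, "second");

   passes the previous result back in as CHKSUM.  */
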
9073 /* P is a string that will be used in a symbol. Mask out any characters
9074 that are not valid in that context. */
9075
9076 void
9077 clean_symbol_name (char *p)
9078 {
9079 for (; *p; p++)
9080 if (! (ISALNUM (*p)
9081 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9082 || *p == '$'
9083 #endif
9084 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9085 || *p == '.'
9086 #endif
9087 ))
9088 *p = '_';
9089 }
9090
9091 /* Generate a name for a special-purpose function.
9092 The generated name may need to be unique across the whole link.
9093 Changes to this function may also require corresponding changes to
9094 xstrdup_mask_random.
9095 TYPE is some string to identify the purpose of this function to the
9096 linker or collect2; it must start with an uppercase letter,
9097 one of:
9098 I - for constructors
9099 D - for destructors
9100 N - for C++ anonymous namespaces
9101 F - for DWARF unwind frame information. */
9102
9103 tree
9104 get_file_function_name (const char *type)
9105 {
9106 char *buf;
9107 const char *p;
9108 char *q;
9109
9110 /* If we already have a name we know to be unique, just use that. */
9111 if (first_global_object_name)
9112 p = q = ASTRDUP (first_global_object_name);
9113 /* If the target is handling the constructors/destructors, they
9114 will be local to this file and the name is only necessary for
9115 debugging purposes.
9116 We also assign sub_I and sub_D suffixes to constructors called from
9117 the global static constructors. These are always local. */
9118 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9119 || (strncmp (type, "sub_", 4) == 0
9120 && (type[4] == 'I' || type[4] == 'D')))
9121 {
9122 const char *file = main_input_filename;
9123 if (! file)
9124 file = input_filename;
9125 /* Just use the file's basename, because the full pathname
9126 might be quite long. */
9127 p = q = ASTRDUP (lbasename (file));
9128 }
9129 else
9130 {
9131 /* Otherwise, the name must be unique across the entire link.
9132 We don't have anything that we know to be unique to this translation
9133 unit, so use what we do have and throw in some randomness. */
9134 unsigned len;
9135 const char *name = weak_global_object_name;
9136 const char *file = main_input_filename;
9137
9138 if (! name)
9139 name = "";
9140 if (! file)
9141 file = input_filename;
9142
9143 len = strlen (file);
9144 q = (char *) alloca (9 + 17 + len + 1);
9145 memcpy (q, file, len + 1);
9146
9147 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9148 crc32_string (0, name), get_random_seed (false));
9149
9150 p = q;
9151 }
9152
9153 clean_symbol_name (q);
9154 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9155 + strlen (type));
9156
9157 /* Set up the name of the file-level functions we may need.
9158 Use a global object (which is already required to be unique over
9159 the program) rather than the file name (which imposes extra
9160 constraints). */
9161 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9162
9163 return get_identifier (buf);
9164 }
9165 \f
9166 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9167
9168 /* Complain that the tree code of NODE does not match the expected 0
9169 terminated list of trailing codes. The trailing code list can be
9170 empty, for a more vague error message. FILE, LINE, and FUNCTION
9171 are of the caller. */
9172
9173 void
9174 tree_check_failed (const_tree node, const char *file,
9175 int line, const char *function, ...)
9176 {
9177 va_list args;
9178 const char *buffer;
9179 unsigned length = 0;
9180 enum tree_code code;
9181
9182 va_start (args, function);
9183 while ((code = (enum tree_code) va_arg (args, int)))
9184 length += 4 + strlen (get_tree_code_name (code));
9185 va_end (args);
9186 if (length)
9187 {
9188 char *tmp;
9189 va_start (args, function);
9190 length += strlen ("expected ");
9191 buffer = tmp = (char *) alloca (length);
9192 length = 0;
9193 while ((code = (enum tree_code) va_arg (args, int)))
9194 {
9195 const char *prefix = length ? " or " : "expected ";
9196
9197 strcpy (tmp + length, prefix);
9198 length += strlen (prefix);
9199 strcpy (tmp + length, get_tree_code_name (code));
9200 length += strlen (get_tree_code_name (code));
9201 }
9202 va_end (args);
9203 }
9204 else
9205 buffer = "unexpected node";
9206
9207 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9208 buffer, get_tree_code_name (TREE_CODE (node)),
9209 function, trim_filename (file), line);
9210 }
9211
9212 /* Complain that the tree code of NODE does match the expected 0
9213 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9214 the caller. */
9215
9216 void
9217 tree_not_check_failed (const_tree node, const char *file,
9218 int line, const char *function, ...)
9219 {
9220 va_list args;
9221 char *buffer;
9222 unsigned length = 0;
9223 enum tree_code code;
9224
9225 va_start (args, function);
9226 while ((code = (enum tree_code) va_arg (args, int)))
9227 length += 4 + strlen (get_tree_code_name (code));
9228 va_end (args);
9229 va_start (args, function);
9230 buffer = (char *) alloca (length);
9231 length = 0;
9232 while ((code = (enum tree_code) va_arg (args, int)))
9233 {
9234 if (length)
9235 {
9236 strcpy (buffer + length, " or ");
9237 length += 4;
9238 }
9239 strcpy (buffer + length, get_tree_code_name (code));
9240 length += strlen (get_tree_code_name (code));
9241 }
9242 va_end (args);
9243
9244 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9245 buffer, get_tree_code_name (TREE_CODE (node)),
9246 function, trim_filename (file), line);
9247 }
9248
9249 /* Similar to tree_check_failed, except that we check for a class of tree
9250 code, given in CL. */
9251
9252 void
9253 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9254 const char *file, int line, const char *function)
9255 {
9256 internal_error
9257 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9258 TREE_CODE_CLASS_STRING (cl),
9259 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9260 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9261 }
9262
9263 /* Similar to tree_check_failed, except that instead of specifying a
9264 dozen codes, use the knowledge that they're all sequential. */
9265
9266 void
9267 tree_range_check_failed (const_tree node, const char *file, int line,
9268 const char *function, enum tree_code c1,
9269 enum tree_code c2)
9270 {
9271 char *buffer;
9272 unsigned length = 0;
9273 unsigned int c;
9274
9275 for (c = c1; c <= c2; ++c)
9276 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9277
9278 length += strlen ("expected ");
9279 buffer = (char *) alloca (length);
9280 length = 0;
9281
9282 for (c = c1; c <= c2; ++c)
9283 {
9284 const char *prefix = length ? " or " : "expected ";
9285
9286 strcpy (buffer + length, prefix);
9287 length += strlen (prefix);
9288 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9289 length += strlen (get_tree_code_name ((enum tree_code) c));
9290 }
9291
9292 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9293 buffer, get_tree_code_name (TREE_CODE (node)),
9294 function, trim_filename (file), line);
9295 }
9296
9297
9298 /* Similar to tree_check_failed, except that we check that a tree does
9299 not belong to the specified class, given in CL. */
9300
9301 void
9302 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9303 const char *file, int line, const char *function)
9304 {
9305 internal_error
9306 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9307 TREE_CODE_CLASS_STRING (cl),
9308 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9309 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9310 }
9311
9312
9313 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9314
9315 void
9316 omp_clause_check_failed (const_tree node, const char *file, int line,
9317 const char *function, enum omp_clause_code code)
9318 {
9319 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9320 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9321 function, trim_filename (file), line);
9322 }
9323
9324
9325 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9326
9327 void
9328 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9329 const char *function, enum omp_clause_code c1,
9330 enum omp_clause_code c2)
9331 {
9332 char *buffer;
9333 unsigned length = 0;
9334 unsigned int c;
9335
9336 for (c = c1; c <= c2; ++c)
9337 length += 4 + strlen (omp_clause_code_name[c]);
9338
9339 length += strlen ("expected ");
9340 buffer = (char *) alloca (length);
9341 length = 0;
9342
9343 for (c = c1; c <= c2; ++c)
9344 {
9345 const char *prefix = length ? " or " : "expected ";
9346
9347 strcpy (buffer + length, prefix);
9348 length += strlen (prefix);
9349 strcpy (buffer + length, omp_clause_code_name[c]);
9350 length += strlen (omp_clause_code_name[c]);
9351 }
9352
9353 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9354 buffer, omp_clause_code_name[TREE_CODE (node)],
9355 function, trim_filename (file), line);
9356 }
9357
9358
9359 #undef DEFTREESTRUCT
9360 #define DEFTREESTRUCT(VAL, NAME) NAME,
9361
9362 static const char *ts_enum_names[] = {
9363 #include "treestruct.def"
9364 };
9365 #undef DEFTREESTRUCT
9366
9367 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9368
9369 /* Similar to tree_class_check_failed, except that we check for
9370 whether CODE contains the tree structure identified by EN. */
9371
9372 void
9373 tree_contains_struct_check_failed (const_tree node,
9374 const enum tree_node_structure_enum en,
9375 const char *file, int line,
9376 const char *function)
9377 {
9378 internal_error
9379 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9380 TS_ENUM_NAME (en),
9381 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9382 }
9383
9384
9385 /* Similar to above, except that the check is for the bounds of a
9386 tree_int_cst's (dynamically sized) vector of elements. */
9387
9388 void
9389 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9390 const char *function)
9391 {
9392 internal_error
9393 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9394 idx + 1, len, function, trim_filename (file), line);
9395 }
9396
9397 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9398 (dynamically sized) vector. */
9399
9400 void
9401 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9402 const char *function)
9403 {
9404 internal_error
9405 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9406 idx + 1, len, function, trim_filename (file), line);
9407 }
9408
9409 /* Similar to above, except that the check is for the bounds of the operand
9410 vector of an expression node EXP. */
9411
9412 void
9413 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9414 int line, const char *function)
9415 {
9416 enum tree_code code = TREE_CODE (exp);
9417 internal_error
9418 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9419 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9420 function, trim_filename (file), line);
9421 }
9422
9423 /* Similar to above, except that the check is for the number of
9424 operands of an OMP_CLAUSE node. */
9425
9426 void
9427 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9428 int line, const char *function)
9429 {
9430 internal_error
9431 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9432 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9433 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9434 trim_filename (file), line);
9435 }
9436 #endif /* ENABLE_TREE_CHECKING */
9437 \f
9438 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9439 and mapped to the machine mode MODE. Initialize its fields and build
9440 the information necessary for debugging output. */
9441
9442 static tree
9443 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9444 {
9445 tree t;
9446 hashval_t hashcode = 0;
9447
9448 t = make_node (VECTOR_TYPE);
9449 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9450 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9451 SET_TYPE_MODE (t, mode);
9452
9453 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9454 SET_TYPE_STRUCTURAL_EQUALITY (t);
9455 else if (TYPE_CANONICAL (innertype) != innertype
9456 || mode != VOIDmode)
9457 TYPE_CANONICAL (t)
9458 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9459
9460 layout_type (t);
9461
9462 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9463 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9464 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9465 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9466 t = type_hash_canon (hashcode, t);
9467
9468 /* We have built a main variant, based on the main variant of the
9469 inner type. Use it to build the variant we return. */
9470 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9471 && TREE_TYPE (t) != innertype)
9472 return build_type_attribute_qual_variant (t,
9473 TYPE_ATTRIBUTES (innertype),
9474 TYPE_QUALS (innertype));
9475
9476 return t;
9477 }
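
/* Illustrative note (added commentary, not part of the original sources):
   because the main variant is registered via type_hash_canon, two calls such
   as

     tree a = make_vector_type (intSI_type_node, 4, V4SImode);
     tree b = make_vector_type (intSI_type_node, 4, V4SImode);

   are expected to return the same node, while a qualified or attributed
   inner type gets a separate variant built just above.  */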
9478
9479 static tree
9480 make_or_reuse_type (unsigned size, int unsignedp)
9481 {
9482 if (size == INT_TYPE_SIZE)
9483 return unsignedp ? unsigned_type_node : integer_type_node;
9484 if (size == CHAR_TYPE_SIZE)
9485 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9486 if (size == SHORT_TYPE_SIZE)
9487 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9488 if (size == LONG_TYPE_SIZE)
9489 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9490 if (size == LONG_LONG_TYPE_SIZE)
9491 return (unsignedp ? long_long_unsigned_type_node
9492 : long_long_integer_type_node);
9493 if (size == 128 && int128_integer_type_node)
9494 return (unsignedp ? int128_unsigned_type_node
9495 : int128_integer_type_node);
9496
9497 if (unsignedp)
9498 return make_unsigned_type (size);
9499 else
9500 return make_signed_type (size);
9501 }
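
/* Illustrative example (assumes a typical target with INT_TYPE_SIZE == 32 and
   LONG_TYPE_SIZE == 64): make_or_reuse_type (32, 1) simply returns
   unsigned_type_node, whereas make_or_reuse_type (24, 0) matches none of the
   standard sizes and falls through to make_signed_type (24).  */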
9502
9503 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9504
9505 static tree
9506 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9507 {
9508 if (satp)
9509 {
9510 if (size == SHORT_FRACT_TYPE_SIZE)
9511 return unsignedp ? sat_unsigned_short_fract_type_node
9512 : sat_short_fract_type_node;
9513 if (size == FRACT_TYPE_SIZE)
9514 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9515 if (size == LONG_FRACT_TYPE_SIZE)
9516 return unsignedp ? sat_unsigned_long_fract_type_node
9517 : sat_long_fract_type_node;
9518 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9519 return unsignedp ? sat_unsigned_long_long_fract_type_node
9520 : sat_long_long_fract_type_node;
9521 }
9522 else
9523 {
9524 if (size == SHORT_FRACT_TYPE_SIZE)
9525 return unsignedp ? unsigned_short_fract_type_node
9526 : short_fract_type_node;
9527 if (size == FRACT_TYPE_SIZE)
9528 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9529 if (size == LONG_FRACT_TYPE_SIZE)
9530 return unsignedp ? unsigned_long_fract_type_node
9531 : long_fract_type_node;
9532 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9533 return unsignedp ? unsigned_long_long_fract_type_node
9534 : long_long_fract_type_node;
9535 }
9536
9537 return make_fract_type (size, unsignedp, satp);
9538 }
9539
9540 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9541
9542 static tree
9543 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9544 {
9545 if (satp)
9546 {
9547 if (size == SHORT_ACCUM_TYPE_SIZE)
9548 return unsignedp ? sat_unsigned_short_accum_type_node
9549 : sat_short_accum_type_node;
9550 if (size == ACCUM_TYPE_SIZE)
9551 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9552 if (size == LONG_ACCUM_TYPE_SIZE)
9553 return unsignedp ? sat_unsigned_long_accum_type_node
9554 : sat_long_accum_type_node;
9555 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9556 return unsignedp ? sat_unsigned_long_long_accum_type_node
9557 : sat_long_long_accum_type_node;
9558 }
9559 else
9560 {
9561 if (size == SHORT_ACCUM_TYPE_SIZE)
9562 return unsignedp ? unsigned_short_accum_type_node
9563 : short_accum_type_node;
9564 if (size == ACCUM_TYPE_SIZE)
9565 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9566 if (size == LONG_ACCUM_TYPE_SIZE)
9567 return unsignedp ? unsigned_long_accum_type_node
9568 : long_accum_type_node;
9569 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9570 return unsignedp ? unsigned_long_long_accum_type_node
9571 : long_long_accum_type_node;
9572 }
9573
9574 return make_accum_type (size, unsignedp, satp);
9575 }
9576
9577 /* Create nodes for all integer types (and error_mark_node) using the sizes
9578 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9579 SHORT_DOUBLE specifies whether double should be of the same precision
9580 as float. */
9581
9582 void
9583 build_common_tree_nodes (bool signed_char, bool short_double)
9584 {
9585 error_mark_node = make_node (ERROR_MARK);
9586 TREE_TYPE (error_mark_node) = error_mark_node;
9587
9588 initialize_sizetypes ();
9589
9590 /* Define both `signed char' and `unsigned char'. */
9591 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9592 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9593 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9594 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9595
9596 /* Define `char', which is like either `signed char' or `unsigned char'
9597 but not the same as either. */
9598 char_type_node
9599 = (signed_char
9600 ? make_signed_type (CHAR_TYPE_SIZE)
9601 : make_unsigned_type (CHAR_TYPE_SIZE));
9602 TYPE_STRING_FLAG (char_type_node) = 1;
9603
9604 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9605 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9606 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9607 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9608 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9609 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9610 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9611 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9612 #if HOST_BITS_PER_WIDE_INT >= 64
9613 /* TODO: This isn't correct, but at the moment the logic depends on the
9614 host's wide integers rather than the target's.
9615 If there is a target that does not support TImode but does have a 128-bit
9616 integer-scalar register, this target check needs to be adjusted. */
9617 if (targetm.scalar_mode_supported_p (TImode))
9618 {
9619 int128_integer_type_node = make_signed_type (128);
9620 int128_unsigned_type_node = make_unsigned_type (128);
9621 }
9622 #endif
9623
9624 /* Define a boolean type. This type only represents boolean values but
9625 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9626 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9627 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9628 TYPE_PRECISION (boolean_type_node) = 1;
9629 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9630
9631 /* Define what type to use for size_t. */
9632 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9633 size_type_node = unsigned_type_node;
9634 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9635 size_type_node = long_unsigned_type_node;
9636 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9637 size_type_node = long_long_unsigned_type_node;
9638 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9639 size_type_node = short_unsigned_type_node;
9640 else
9641 gcc_unreachable ();
9642
9643 /* Fill in the rest of the sized types. Reuse existing type nodes
9644 when possible. */
9645 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9646 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9647 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9648 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9649 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9650
9651 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9652 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9653 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9654 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9655 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9656
9657 access_public_node = get_identifier ("public");
9658 access_protected_node = get_identifier ("protected");
9659 access_private_node = get_identifier ("private");
9660
9661 /* Define these next since the types below may use them. */
9662 integer_zero_node = build_int_cst (integer_type_node, 0);
9663 integer_one_node = build_int_cst (integer_type_node, 1);
9664 integer_three_node = build_int_cst (integer_type_node, 3);
9665 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9666
9667 size_zero_node = size_int (0);
9668 size_one_node = size_int (1);
9669 bitsize_zero_node = bitsize_int (0);
9670 bitsize_one_node = bitsize_int (1);
9671 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9672
9673 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9674 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9675
9676 void_type_node = make_node (VOID_TYPE);
9677 layout_type (void_type_node);
9678
9679 /* We are not going to have real types in C with less than byte alignment,
9680 so we might as well not have any types that claim to have it. */
9681 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9682 TYPE_USER_ALIGN (void_type_node) = 0;
9683
9684 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9685 layout_type (TREE_TYPE (null_pointer_node));
9686
9687 ptr_type_node = build_pointer_type (void_type_node);
9688 const_ptr_type_node
9689 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9690 fileptr_type_node = ptr_type_node;
9691
9692 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9693
9694 float_type_node = make_node (REAL_TYPE);
9695 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9696 layout_type (float_type_node);
9697
9698 double_type_node = make_node (REAL_TYPE);
9699 if (short_double)
9700 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9701 else
9702 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9703 layout_type (double_type_node);
9704
9705 long_double_type_node = make_node (REAL_TYPE);
9706 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9707 layout_type (long_double_type_node);
9708
9709 float_ptr_type_node = build_pointer_type (float_type_node);
9710 double_ptr_type_node = build_pointer_type (double_type_node);
9711 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9712 integer_ptr_type_node = build_pointer_type (integer_type_node);
9713
9714 /* Fixed size integer types. */
9715 uint16_type_node = build_nonstandard_integer_type (16, true);
9716 uint32_type_node = build_nonstandard_integer_type (32, true);
9717 uint64_type_node = build_nonstandard_integer_type (64, true);
9718
9719 /* Decimal float types. */
9720 dfloat32_type_node = make_node (REAL_TYPE);
9721 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9722 layout_type (dfloat32_type_node);
9723 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9724 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9725
9726 dfloat64_type_node = make_node (REAL_TYPE);
9727 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9728 layout_type (dfloat64_type_node);
9729 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9730 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9731
9732 dfloat128_type_node = make_node (REAL_TYPE);
9733 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9734 layout_type (dfloat128_type_node);
9735 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9736 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9737
9738 complex_integer_type_node = build_complex_type (integer_type_node);
9739 complex_float_type_node = build_complex_type (float_type_node);
9740 complex_double_type_node = build_complex_type (double_type_node);
9741 complex_long_double_type_node = build_complex_type (long_double_type_node);
9742
9743 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9744 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9745 sat_ ## KIND ## _type_node = \
9746 make_sat_signed_ ## KIND ## _type (SIZE); \
9747 sat_unsigned_ ## KIND ## _type_node = \
9748 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9749 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9750 unsigned_ ## KIND ## _type_node = \
9751 make_unsigned_ ## KIND ## _type (SIZE);
9752
9753 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9754 sat_ ## WIDTH ## KIND ## _type_node = \
9755 make_sat_signed_ ## KIND ## _type (SIZE); \
9756 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9757 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9758 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9759 unsigned_ ## WIDTH ## KIND ## _type_node = \
9760 make_unsigned_ ## KIND ## _type (SIZE);
9761
9762 /* Make fixed-point type nodes based on four different widths. */
9763 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9764 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9765 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9766 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9767 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9768
9769 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9770 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9771 NAME ## _type_node = \
9772 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9773 u ## NAME ## _type_node = \
9774 make_or_reuse_unsigned_ ## KIND ## _type \
9775 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9776 sat_ ## NAME ## _type_node = \
9777 make_or_reuse_sat_signed_ ## KIND ## _type \
9778 (GET_MODE_BITSIZE (MODE ## mode)); \
9779 sat_u ## NAME ## _type_node = \
9780 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9781 (GET_MODE_BITSIZE (U ## MODE ## mode));
9782
9783 /* Fixed-point type and mode nodes. */
9784 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9785 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9786 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9787 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9788 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9789 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9790 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9791 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9792 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9793 MAKE_FIXED_MODE_NODE (accum, da, DA)
9794 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9795
9796 {
9797 tree t = targetm.build_builtin_va_list ();
9798
9799 /* Many back-ends define record types without setting TYPE_NAME.
9800 If we copied the record type here, we'd keep the original
9801 record type without a name. This breaks name mangling. So,
9802 don't copy record types and let c_common_nodes_and_builtins()
9803 declare the type to be __builtin_va_list. */
9804 if (TREE_CODE (t) != RECORD_TYPE)
9805 t = build_variant_type_copy (t);
9806
9807 va_list_type_node = t;
9808 }
9809 }
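
/* Illustrative sketch (added commentary): a front end typically calls this
   once during initialization, roughly

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   and then defines its own language-specific nodes; the exact call site and
   flag values are front-end specific.  */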
9810
9811 /* Modify DECL for given flags.
9812 TM_PURE attribute is set only on types, so the function will modify
9813 DECL's type when ECF_TM_PURE is used. */
9814
9815 void
9816 set_call_expr_flags (tree decl, int flags)
9817 {
9818 if (flags & ECF_NOTHROW)
9819 TREE_NOTHROW (decl) = 1;
9820 if (flags & ECF_CONST)
9821 TREE_READONLY (decl) = 1;
9822 if (flags & ECF_PURE)
9823 DECL_PURE_P (decl) = 1;
9824 if (flags & ECF_LOOPING_CONST_OR_PURE)
9825 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9826 if (flags & ECF_NOVOPS)
9827 DECL_IS_NOVOPS (decl) = 1;
9828 if (flags & ECF_NORETURN)
9829 TREE_THIS_VOLATILE (decl) = 1;
9830 if (flags & ECF_MALLOC)
9831 DECL_IS_MALLOC (decl) = 1;
9832 if (flags & ECF_RETURNS_TWICE)
9833 DECL_IS_RETURNS_TWICE (decl) = 1;
9834 if (flags & ECF_LEAF)
9835 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9836 NULL, DECL_ATTRIBUTES (decl));
9837 if ((flags & ECF_TM_PURE) && flag_tm)
9838 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9839 /* Looping const or pure is implied by noreturn.
9840 There is currently no way to declare looping const or looping pure alone. */
9841 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9842 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9843 }
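
/* Illustrative example (added commentary): a call such as

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   sets TREE_READONLY and TREE_NOTHROW on DECL and appends a "leaf" attribute
   to DECL_ATTRIBUTES, mirroring the flag handling above.  */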
9844
9845
9846 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9847
9848 static void
9849 local_define_builtin (const char *name, tree type, enum built_in_function code,
9850 const char *library_name, int ecf_flags)
9851 {
9852 tree decl;
9853
9854 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9855 library_name, NULL_TREE);
9856 set_call_expr_flags (decl, ecf_flags);
9857
9858 set_builtin_decl (code, decl, true);
9859 }
9860
9861 /* Call this function after instantiating all builtins that the language
9862 front end cares about. This will build the rest of the builtins that
9863 are relied upon by the tree optimizers and the middle-end. */
9864
9865 void
9866 build_common_builtin_nodes (void)
9867 {
9868 tree tmp, ftype;
9869 int ecf_flags;
9870
9871 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9872 {
9873 ftype = build_function_type (void_type_node, void_list_node);
9874 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9875 "__builtin_unreachable",
9876 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9877 | ECF_CONST);
9878 }
9879
9880 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9881 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9882 {
9883 ftype = build_function_type_list (ptr_type_node,
9884 ptr_type_node, const_ptr_type_node,
9885 size_type_node, NULL_TREE);
9886
9887 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9888 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9889 "memcpy", ECF_NOTHROW | ECF_LEAF);
9890 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9891 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9892 "memmove", ECF_NOTHROW | ECF_LEAF);
9893 }
9894
9895 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9896 {
9897 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9898 const_ptr_type_node, size_type_node,
9899 NULL_TREE);
9900 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9901 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9902 }
9903
9904 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9905 {
9906 ftype = build_function_type_list (ptr_type_node,
9907 ptr_type_node, integer_type_node,
9908 size_type_node, NULL_TREE);
9909 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9910 "memset", ECF_NOTHROW | ECF_LEAF);
9911 }
9912
9913 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9914 {
9915 ftype = build_function_type_list (ptr_type_node,
9916 size_type_node, NULL_TREE);
9917 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9918 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9919 }
9920
9921 ftype = build_function_type_list (ptr_type_node, size_type_node,
9922 size_type_node, NULL_TREE);
9923 local_define_builtin ("__builtin_alloca_with_align", ftype,
9924 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9925 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9926
9927 /* If we're checking the stack, `alloca' can throw. */
9928 if (flag_stack_check)
9929 {
9930 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9931 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9932 }
9933
9934 ftype = build_function_type_list (void_type_node,
9935 ptr_type_node, ptr_type_node,
9936 ptr_type_node, NULL_TREE);
9937 local_define_builtin ("__builtin_init_trampoline", ftype,
9938 BUILT_IN_INIT_TRAMPOLINE,
9939 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9940 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9941 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9942 "__builtin_init_heap_trampoline",
9943 ECF_NOTHROW | ECF_LEAF);
9944
9945 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9946 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9947 BUILT_IN_ADJUST_TRAMPOLINE,
9948 "__builtin_adjust_trampoline",
9949 ECF_CONST | ECF_NOTHROW);
9950
9951 ftype = build_function_type_list (void_type_node,
9952 ptr_type_node, ptr_type_node, NULL_TREE);
9953 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9954 BUILT_IN_NONLOCAL_GOTO,
9955 "__builtin_nonlocal_goto",
9956 ECF_NORETURN | ECF_NOTHROW);
9957
9958 ftype = build_function_type_list (void_type_node,
9959 ptr_type_node, ptr_type_node, NULL_TREE);
9960 local_define_builtin ("__builtin_setjmp_setup", ftype,
9961 BUILT_IN_SETJMP_SETUP,
9962 "__builtin_setjmp_setup", ECF_NOTHROW);
9963
9964 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9965 local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
9966 BUILT_IN_SETJMP_DISPATCHER,
9967 "__builtin_setjmp_dispatcher",
9968 ECF_PURE | ECF_NOTHROW);
9969
9970 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9971 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9972 BUILT_IN_SETJMP_RECEIVER,
9973 "__builtin_setjmp_receiver", ECF_NOTHROW);
9974
9975 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9977 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9978
9979 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_stack_restore", ftype,
9981 BUILT_IN_STACK_RESTORE,
9982 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9983
9984 /* If there's a possibility that we might use the ARM EABI, build the
9985 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9986 if (targetm.arm_eabi_unwinder)
9987 {
9988 ftype = build_function_type_list (void_type_node, NULL_TREE);
9989 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9990 BUILT_IN_CXA_END_CLEANUP,
9991 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9992 }
9993
9994 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9995 local_define_builtin ("__builtin_unwind_resume", ftype,
9996 BUILT_IN_UNWIND_RESUME,
9997 ((targetm_common.except_unwind_info (&global_options)
9998 == UI_SJLJ)
9999 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10000 ECF_NORETURN);
10001
10002 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10003 {
10004 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10005 NULL_TREE);
10006 local_define_builtin ("__builtin_return_address", ftype,
10007 BUILT_IN_RETURN_ADDRESS,
10008 "__builtin_return_address",
10009 ECF_NOTHROW);
10010 }
10011
10012 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10013 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10014 {
10015 ftype = build_function_type_list (void_type_node, ptr_type_node,
10016 ptr_type_node, NULL_TREE);
10017 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10018 local_define_builtin ("__cyg_profile_func_enter", ftype,
10019 BUILT_IN_PROFILE_FUNC_ENTER,
10020 "__cyg_profile_func_enter", 0);
10021 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10022 local_define_builtin ("__cyg_profile_func_exit", ftype,
10023 BUILT_IN_PROFILE_FUNC_EXIT,
10024 "__cyg_profile_func_exit", 0);
10025 }
10026
10027 /* The exception object and filter values from the runtime. The argument
10028 must be zero before exception lowering, i.e. from the front end. After
10029 exception lowering, it will be the region number for the exception
10030 landing pad. These functions are PURE instead of CONST to prevent
10031 them from being hoisted past the exception edge that will initialize
10032 its value in the landing pad. */
10033 ftype = build_function_type_list (ptr_type_node,
10034 integer_type_node, NULL_TREE);
10035 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10036 /* Only use TM_PURE if we have TM language support. */
10037 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10038 ecf_flags |= ECF_TM_PURE;
10039 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10040 "__builtin_eh_pointer", ecf_flags);
10041
10042 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10043 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10044 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10045 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10046
10047 ftype = build_function_type_list (void_type_node,
10048 integer_type_node, integer_type_node,
10049 NULL_TREE);
10050 local_define_builtin ("__builtin_eh_copy_values", ftype,
10051 BUILT_IN_EH_COPY_VALUES,
10052 "__builtin_eh_copy_values", ECF_NOTHROW);
10053
10054 /* Complex multiplication and division. These are handled as builtins
10055 rather than optabs because emit_library_call_value doesn't support
10056 complex. Further, we can do slightly better with folding these
10057 beasties if the real and imaginary parts of the arguments are separate. */
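/* Added illustrative note: on a target whose complex float modes include
   SCmode, the loop below creates builtins named "__mulsc3" and "__divsc3"
   (or "__gnu_mulsc3" and "__gnu_divsc3" when targetm.libfunc_gnu_prefix is
   set); the exact set of modes iterated over is target dependent.  */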
10058 {
10059 int mode;
10060
10061 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10062 {
10063 char mode_name_buf[4], *q;
10064 const char *p;
10065 enum built_in_function mcode, dcode;
10066 tree type, inner_type;
10067 const char *prefix = "__";
10068
10069 if (targetm.libfunc_gnu_prefix)
10070 prefix = "__gnu_";
10071
10072 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10073 if (type == NULL)
10074 continue;
10075 inner_type = TREE_TYPE (type);
10076
10077 ftype = build_function_type_list (type, inner_type, inner_type,
10078 inner_type, inner_type, NULL_TREE);
10079
10080 mcode = ((enum built_in_function)
10081 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10082 dcode = ((enum built_in_function)
10083 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10084
10085 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10086 *q = TOLOWER (*p);
10087 *q = '\0';
10088
10089 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10090 NULL);
10091 local_define_builtin (built_in_names[mcode], ftype, mcode,
10092 built_in_names[mcode],
10093 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10094
10095 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10096 NULL);
10097 local_define_builtin (built_in_names[dcode], ftype, dcode,
10098 built_in_names[dcode],
10099 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10100 }
10101 }
10102 }
10103
10104 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10105 better way.
10106
10107 If we requested a pointer to a vector, build up the pointers that
10108 we stripped off while looking for the inner type. Similarly for
10109 return values from functions.
10110
10111 The argument TYPE is the top of the chain, and BOTTOM is the
10112 new type which we will point to. */
10113
10114 tree
10115 reconstruct_complex_type (tree type, tree bottom)
10116 {
10117 tree inner, outer;
10118
10119 if (TREE_CODE (type) == POINTER_TYPE)
10120 {
10121 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10122 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10123 TYPE_REF_CAN_ALIAS_ALL (type));
10124 }
10125 else if (TREE_CODE (type) == REFERENCE_TYPE)
10126 {
10127 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10128 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10129 TYPE_REF_CAN_ALIAS_ALL (type));
10130 }
10131 else if (TREE_CODE (type) == ARRAY_TYPE)
10132 {
10133 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10134 outer = build_array_type (inner, TYPE_DOMAIN (type));
10135 }
10136 else if (TREE_CODE (type) == FUNCTION_TYPE)
10137 {
10138 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10139 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10140 }
10141 else if (TREE_CODE (type) == METHOD_TYPE)
10142 {
10143 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10144 /* The build_method_type_directly() routine prepends 'this' to the argument
10145 list, so we must compensate by getting rid of it. */
10146 outer
10147 = build_method_type_directly
10148 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10149 inner,
10150 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10151 }
10152 else if (TREE_CODE (type) == OFFSET_TYPE)
10153 {
10154 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10155 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10156 }
10157 else
10158 return bottom;
10159
10160 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10161 TYPE_QUALS (type));
10162 }
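
/* Illustrative example (added commentary): if TYPE is "float *" and BOTTOM is
   a vector of four floats, the result is a pointer to that vector type; any
   pointer, reference, array, function or method wrappers stripped while
   locating the inner type are rebuilt around BOTTOM, with the original
   qualifiers and attributes reapplied.  */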
10163
10164 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10165 the inner type. */
10166 tree
10167 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10168 {
10169 int nunits;
10170
10171 switch (GET_MODE_CLASS (mode))
10172 {
10173 case MODE_VECTOR_INT:
10174 case MODE_VECTOR_FLOAT:
10175 case MODE_VECTOR_FRACT:
10176 case MODE_VECTOR_UFRACT:
10177 case MODE_VECTOR_ACCUM:
10178 case MODE_VECTOR_UACCUM:
10179 nunits = GET_MODE_NUNITS (mode);
10180 break;
10181
10182 case MODE_INT:
10183 /* Check that there are no leftover bits. */
10184 gcc_assert (GET_MODE_BITSIZE (mode)
10185 % tree_to_hwi (TYPE_SIZE (innertype)) == 0);
10186
10187 nunits = GET_MODE_BITSIZE (mode)
10188 / tree_to_hwi (TYPE_SIZE (innertype));
10189 break;
10190
10191 default:
10192 gcc_unreachable ();
10193 }
10194
10195 return make_vector_type (innertype, nunits, mode);
10196 }
10197
10198 /* Similarly, but takes the inner type and number of units, which must be
10199 a power of two. */
10200
10201 tree
10202 build_vector_type (tree innertype, int nunits)
10203 {
10204 return make_vector_type (innertype, nunits, VOIDmode);
10205 }
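
/* Illustrative examples (added commentary): build_vector_type
   (float_type_node, 4) builds a four-element float vector and lets
   layout_type pick the machine mode, whereas, on targets providing V4SImode,
   build_vector_type_for_mode (intSI_type_node, V4SImode) derives the number
   of units (four) from the mode itself.  */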
10206
10207 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10208
10209 tree
10210 build_opaque_vector_type (tree innertype, int nunits)
10211 {
10212 tree t = make_vector_type (innertype, nunits, VOIDmode);
10213 tree cand;
10214 /* We always build the non-opaque variant before the opaque one,
10215 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10216 cand = TYPE_NEXT_VARIANT (t);
10217 if (cand
10218 && TYPE_VECTOR_OPAQUE (cand)
10219 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10220 return cand;
10221 /* Otherwise build a variant type and make sure to queue it after
10222 the non-opaque type. */
10223 cand = build_distinct_type_copy (t);
10224 TYPE_VECTOR_OPAQUE (cand) = true;
10225 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10226 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10227 TYPE_NEXT_VARIANT (t) = cand;
10228 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10229 return cand;
10230 }
10231
10232
10233 /* Given an initializer INIT, return TRUE if INIT is zero or some
10234 aggregate of zeros. Otherwise return FALSE. */
10235 bool
10236 initializer_zerop (const_tree init)
10237 {
10238 tree elt;
10239
10240 STRIP_NOPS (init);
10241
10242 switch (TREE_CODE (init))
10243 {
10244 case INTEGER_CST:
10245 return integer_zerop (init);
10246
10247 case REAL_CST:
10248 /* ??? Note that this is not correct for C4X float formats. There,
10249 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10250 negative exponent. */
10251 return real_zerop (init)
10252 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10253
10254 case FIXED_CST:
10255 return fixed_zerop (init);
10256
10257 case COMPLEX_CST:
10258 return integer_zerop (init)
10259 || (real_zerop (init)
10260 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10261 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10262
10263 case VECTOR_CST:
10264 {
10265 unsigned i;
10266 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10267 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10268 return false;
10269 return true;
10270 }
10271
10272 case CONSTRUCTOR:
10273 {
10274 unsigned HOST_WIDE_INT idx;
10275
10276 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10277 if (!initializer_zerop (elt))
10278 return false;
10279 return true;
10280 }
10281
10282 case STRING_CST:
10283 {
10284 int i;
10285
10286 /* We need to loop through all elements to handle cases like
10287 "\0" and "\0foobar". */
10288 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10289 if (TREE_STRING_POINTER (init)[i] != '\0')
10290 return false;
10291
10292 return true;
10293 }
10294
10295 default:
10296 return false;
10297 }
10298 }
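
/* Illustrative examples (added commentary): given the checks above,

     initializer_zerop (build_int_cst (integer_type_node, 0))

   returns true, as does a STRING_CST consisting only of NUL bytes or a
   CONSTRUCTOR whose elements are all zero initializers, while a REAL_CST
   holding -0.0 makes it return false.  */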
10299
10300 /* Check whether vector VEC consists of all equal elements and whether
10301 the number of elements corresponds to the type of VEC.
10302 The function returns the first element of the vector
10303 or NULL_TREE if the vector is not uniform. */
10304 tree
10305 uniform_vector_p (const_tree vec)
10306 {
10307 tree first, t;
10308 unsigned i;
10309
10310 if (vec == NULL_TREE)
10311 return NULL_TREE;
10312
10313 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10314
10315 if (TREE_CODE (vec) == VECTOR_CST)
10316 {
10317 first = VECTOR_CST_ELT (vec, 0);
10318 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10319 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10320 return NULL_TREE;
10321
10322 return first;
10323 }
10324
10325 else if (TREE_CODE (vec) == CONSTRUCTOR)
10326 {
10327 first = error_mark_node;
10328
10329 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10330 {
10331 if (i == 0)
10332 {
10333 first = t;
10334 continue;
10335 }
10336 if (!operand_equal_p (first, t, 0))
10337 return NULL_TREE;
10338 }
10339 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10340 return NULL_TREE;
10341
10342 return first;
10343 }
10344
10345 return NULL_TREE;
10346 }
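
/* Illustrative example (added commentary): for a VECTOR_CST such as
   { 7, 7, 7, 7 } this returns the element node for 7, whereas { 1, 2, 3, 4 }
   or a CONSTRUCTOR with fewer elements than TYPE_VECTOR_SUBPARTS yields
   NULL_TREE.  */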
10347
10348 /* Build an empty statement at location LOC. */
10349
10350 tree
10351 build_empty_stmt (location_t loc)
10352 {
10353 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10354 SET_EXPR_LOCATION (t, loc);
10355 return t;
10356 }
10357
10358
10359 /* Build an OpenMP clause with code CODE. LOC is the location of the
10360 clause. */
10361
10362 tree
10363 build_omp_clause (location_t loc, enum omp_clause_code code)
10364 {
10365 tree t;
10366 int size, length;
10367
10368 length = omp_clause_num_ops[code];
10369 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10370
10371 record_node_allocation_statistics (OMP_CLAUSE, size);
10372
10373 t = ggc_alloc_tree_node (size);
10374 memset (t, 0, size);
10375 TREE_SET_CODE (t, OMP_CLAUSE);
10376 OMP_CLAUSE_SET_CODE (t, code);
10377 OMP_CLAUSE_LOCATION (t) = loc;
10378
10379 return t;
10380 }
10381
10382 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10383 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10384 Except for the CODE and operand count field, other storage for the
10385 object is initialized to zeros. */
10386
10387 tree
10388 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10389 {
10390 tree t;
10391 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10392
10393 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10394 gcc_assert (len >= 1);
10395
10396 record_node_allocation_statistics (code, length);
10397
10398 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10399
10400 TREE_SET_CODE (t, code);
10401
10402 /* Can't use TREE_OPERAND to store the length because if checking is
10403 enabled, it will try to check the length before we store it. :-P */
10404 t->exp.operands[0] = build_int_cst (sizetype, len);
10405
10406 return t;
10407 }
10408
10409 /* Helper function for build_call_* functions; build a CALL_EXPR with
10410 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10411 the argument slots. */
10412
10413 static tree
10414 build_call_1 (tree return_type, tree fn, int nargs)
10415 {
10416 tree t;
10417
10418 t = build_vl_exp (CALL_EXPR, nargs + 3);
10419 TREE_TYPE (t) = return_type;
10420 CALL_EXPR_FN (t) = fn;
10421 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10422
10423 return t;
10424 }
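
/* Illustrative note (added commentary): a CALL_EXPR stores its operand count,
   the callee and the static chain ahead of the arguments, so a call with
   NARGS arguments is allocated as

     build_vl_exp (CALL_EXPR, NARGS + 3);

   and argument I then lives in CALL_EXPR_ARG (T, I), i.e. operand I + 3.  */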
10425
10426 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10427 FN and a null static chain slot. NARGS is the number of call arguments
10428 which are specified as "..." arguments. */
10429
10430 tree
10431 build_call_nary (tree return_type, tree fn, int nargs, ...)
10432 {
10433 tree ret;
10434 va_list args;
10435 va_start (args, nargs);
10436 ret = build_call_valist (return_type, fn, nargs, args);
10437 va_end (args);
10438 return ret;
10439 }
10440
10441 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10442 FN and a null static chain slot. NARGS is the number of call arguments
10443 which are specified as a va_list ARGS. */
10444
10445 tree
10446 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10447 {
10448 tree t;
10449 int i;
10450
10451 t = build_call_1 (return_type, fn, nargs);
10452 for (i = 0; i < nargs; i++)
10453 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10454 process_call_operands (t);
10455 return t;
10456 }
10457
10458 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10459 FN and a null static chain slot. NARGS is the number of call arguments
10460 which are specified as a tree array ARGS. */
10461
10462 tree
10463 build_call_array_loc (location_t loc, tree return_type, tree fn,
10464 int nargs, const tree *args)
10465 {
10466 tree t;
10467 int i;
10468
10469 t = build_call_1 (return_type, fn, nargs);
10470 for (i = 0; i < nargs; i++)
10471 CALL_EXPR_ARG (t, i) = args[i];
10472 process_call_operands (t);
10473 SET_EXPR_LOCATION (t, loc);
10474 return t;
10475 }
10476
10477 /* Like build_call_array, but takes a vec. */
10478
10479 tree
10480 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10481 {
10482 tree ret, t;
10483 unsigned int ix;
10484
10485 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10486 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10487 CALL_EXPR_ARG (ret, ix) = t;
10488 process_call_operands (ret);
10489 return ret;
10490 }
10491
10492
10493 /* Returns true if it is possible to prove that the index of
10494 an array access REF (an ARRAY_REF expression) falls into the
10495 array bounds. */
10496
10497 bool
10498 in_array_bounds_p (tree ref)
10499 {
10500 tree idx = TREE_OPERAND (ref, 1);
10501 tree min, max;
10502
10503 if (TREE_CODE (idx) != INTEGER_CST)
10504 return false;
10505
10506 min = array_ref_low_bound (ref);
10507 max = array_ref_up_bound (ref);
10508 if (!min
10509 || !max
10510 || TREE_CODE (min) != INTEGER_CST
10511 || TREE_CODE (max) != INTEGER_CST)
10512 return false;
10513
10514 if (tree_int_cst_lt (idx, min)
10515 || tree_int_cst_lt (max, idx))
10516 return false;
10517
10518 return true;
10519 }
10520
10521 /* Returns true if it is possible to prove that the range of
10522 an array access REF (an ARRAY_RANGE_REF expression) falls
10523 into the array bounds. */
10524
10525 bool
10526 range_in_array_bounds_p (tree ref)
10527 {
10528 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
10529 tree range_min, range_max, min, max;
10530
10531 range_min = TYPE_MIN_VALUE (domain_type);
10532 range_max = TYPE_MAX_VALUE (domain_type);
10533 if (!range_min
10534 || !range_max
10535 || TREE_CODE (range_min) != INTEGER_CST
10536 || TREE_CODE (range_max) != INTEGER_CST)
10537 return false;
10538
10539 min = array_ref_low_bound (ref);
10540 max = array_ref_up_bound (ref);
10541 if (!min
10542 || !max
10543 || TREE_CODE (min) != INTEGER_CST
10544 || TREE_CODE (max) != INTEGER_CST)
10545 return false;
10546
10547 if (tree_int_cst_lt (range_min, min)
10548 || tree_int_cst_lt (max, range_max))
10549 return false;
10550
10551 return true;
10552 }
10553
10554 /* Return true if T (assumed to be a DECL) must be assigned a memory
10555 location. */
10556
10557 bool
10558 needs_to_live_in_memory (const_tree t)
10559 {
10560 return (TREE_ADDRESSABLE (t)
10561 || is_global_var (t)
10562 || (TREE_CODE (t) == RESULT_DECL
10563 && !DECL_BY_REFERENCE (t)
10564 && aggregate_value_p (t, current_function_decl)));
10565 }
10566
10567 /* Return the value of constant X, sign-extended. */
10568
10569 HOST_WIDE_INT
10570 int_cst_value (const_tree x)
10571 {
10572 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10573 unsigned HOST_WIDE_INT val = tree_to_hwi (x);
10574
10575 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10576 gcc_assert (cst_fits_shwi_p (x));
10577
10578 if (bits < HOST_BITS_PER_WIDE_INT)
10579 {
10580 bool negative = ((val >> (bits - 1)) & 1) != 0;
10581 if (negative)
10582 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10583 else
10584 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10585 }
10586
10587 return val;
10588 }
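
/* Illustrative example (added commentary): for a constant of 8-bit precision
   whose low byte is 0xff, the top bit is set, so the masking above
   sign-extends the value and int_cst_value returns -1 regardless of the
   type's signedness.  */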
10589
10590 /* Like int_cst_value, but return the value as a HOST_WIDEST_INT. */
10591
10592 HOST_WIDEST_INT
10593 widest_int_cst_value (const_tree x)
10594 {
10595 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10596 unsigned HOST_WIDEST_INT val = tree_to_hwi (x);
10597
10598 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10599 gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
10600 gcc_assert (TREE_INT_CST_NUNITS (x) <= 2);
10601
10602 if (TREE_INT_CST_NUNITS (x) == 1)
10603 val = (HOST_WIDE_INT) val;
10604 else
10605 val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_ELT (x, 1))
10606 << HOST_BITS_PER_WIDE_INT);
10607 #else
10608 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10609 gcc_assert (TREE_INT_CST_NUNITS (x) == 1);
10610 #endif
10611
10612 if (bits < HOST_BITS_PER_WIDEST_INT)
10613 {
10614 bool negative = ((val >> (bits - 1)) & 1) != 0;
10615 if (negative)
10616 val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
10617 else
10618 val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
10619 }
10620
10621 return val;
10622 }
10623
10624 /* If TYPE is an integral or pointer type, return an integer type with
10625 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10626 if TYPE is already an integer type of signedness UNSIGNEDP. */
10627
10628 tree
10629 signed_or_unsigned_type_for (int unsignedp, tree type)
10630 {
10631 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10632 return type;
10633
10634 if (TREE_CODE (type) == VECTOR_TYPE)
10635 {
10636 tree inner = TREE_TYPE (type);
10637 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10638 if (!inner2)
10639 return NULL_TREE;
10640 if (inner == inner2)
10641 return type;
10642 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10643 }
10644
10645 if (!INTEGRAL_TYPE_P (type)
10646 && !POINTER_TYPE_P (type))
10647 return NULL_TREE;
10648
10649 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10650 }
10651
10652 /* If TYPE is an integral or pointer type, return an integer type with
10653 the same precision which is unsigned, or itself if TYPE is already an
10654 unsigned integer type. */
10655
10656 tree
10657 unsigned_type_for (tree type)
10658 {
10659 return signed_or_unsigned_type_for (1, type);
10660 }
10661
10662 /* If TYPE is an integral or pointer type, return an integer type with
10663 the same precision which is signed, or itself if TYPE is already a
10664 signed integer type. */
10665
10666 tree
10667 signed_type_for (tree type)
10668 {
10669 return signed_or_unsigned_type_for (0, type);
10670 }
10671
10672 /* If TYPE is a vector type, return a signed integer vector type with the
10673 same width and number of subparts. Otherwise return boolean_type_node. */
10674
10675 tree
10676 truth_type_for (tree type)
10677 {
10678 if (TREE_CODE (type) == VECTOR_TYPE)
10679 {
10680 tree elem = lang_hooks.types.type_for_size
10681 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10682 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10683 }
10684 else
10685 return boolean_type_node;
10686 }
10687
10688 /* Returns the largest value obtainable by casting something in INNER type to
10689 OUTER type. */
10690
10691 tree
10692 upper_bound_in_type (tree outer, tree inner)
10693 {
10694 unsigned int det = 0;
10695 unsigned oprec = TYPE_PRECISION (outer);
10696 unsigned iprec = TYPE_PRECISION (inner);
10697 unsigned prec;
10698
10699 /* Compute a unique number for every combination. */
10700 det |= (oprec > iprec) ? 4 : 0;
10701 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10702 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10703
10704 /* Determine the exponent to use. */
10705 switch (det)
10706 {
10707 case 0:
10708 case 1:
10709 /* oprec <= iprec, outer: signed, inner: don't care. */
10710 prec = oprec - 1;
10711 break;
10712 case 2:
10713 case 3:
10714 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10715 prec = oprec;
10716 break;
10717 case 4:
10718 /* oprec > iprec, outer: signed, inner: signed. */
10719 prec = iprec - 1;
10720 break;
10721 case 5:
10722 /* oprec > iprec, outer: signed, inner: unsigned. */
10723 prec = iprec;
10724 break;
10725 case 6:
10726 /* oprec > iprec, outer: unsigned, inner: signed. */
10727 prec = oprec;
10728 break;
10729 case 7:
10730 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10731 prec = iprec;
10732 break;
10733 default:
10734 gcc_unreachable ();
10735 }
10736
10737 return wide_int_to_tree (outer,
10738 wi::mask (prec, false, TYPE_PRECISION (outer)));
10739 }
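
/* Worked example (added commentary): casting from a 16-bit unsigned INNER to
   a 32-bit signed OUTER gives det = 4 | 0 | 1 = 5, hence prec = iprec = 16,
   and the returned bound is the mask of the 16 low bits, i.e. 65535
   represented in OUTER's precision.  */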
10740
10741 /* Returns the smallest value obtainable by casting something in INNER type to
10742 OUTER type. */
10743
10744 tree
10745 lower_bound_in_type (tree outer, tree inner)
10746 {
10747 unsigned oprec = TYPE_PRECISION (outer);
10748 unsigned iprec = TYPE_PRECISION (inner);
10749
10750 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10751 and obtain 0. */
10752 if (TYPE_UNSIGNED (outer)
10753 /* If we are widening something of an unsigned type, OUTER type
10754 contains all values of INNER type. In particular, both INNER
10755 and OUTER types have zero in common. */
10756 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10757 return build_int_cst (outer, 0);
10758 else
10759 {
10760 /* If we are widening a signed type to another signed type, we
10761 want to obtain -2^^(iprec-1). If we are keeping the
10762 precision or narrowing to a signed type, we want to obtain
10763 -2^(oprec-1). */
10764 unsigned prec = oprec > iprec ? iprec : oprec;
10765 return wide_int_to_tree (outer,
10766 wi::mask (prec - 1, true,
10767 TYPE_PRECISION (outer)));
10768 }
10769 }
10770
10771 /* Return nonzero if two operands that are suitable for PHI nodes are
10772 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10773 SSA_NAME or invariant. Note that this is strictly an optimization.
10774 That is, callers of this function can directly call operand_equal_p
10775 and get the same result, only slower. */
10776
10777 int
10778 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10779 {
10780 if (arg0 == arg1)
10781 return 1;
10782 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10783 return 0;
10784 return operand_equal_p (arg0, arg1, 0);
10785 }
10786
10787 /* Returns the number of zeros at the end of the binary representation of X. */
10788
10789 tree
10790 num_ending_zeros (const_tree x)
10791 {
10792 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10793 }
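
/* Illustrative example (added commentary): for X equal to 24 (binary 11000)
   this returns an INTEGER_CST with value 3 in X's type, since wi::ctz counts
   the trailing zero bits.  */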
10794
10795
10796 #define WALK_SUBTREE(NODE) \
10797 do \
10798 { \
10799 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10800 if (result) \
10801 return result; \
10802 } \
10803 while (0)
10804
10805 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10806 to be walked whenever a type is seen in the tree. The rest of the operands
10807 and the return value are as for walk_tree. */
10808
10809 static tree
10810 walk_type_fields (tree type, walk_tree_fn func, void *data,
10811 struct pointer_set_t *pset, walk_tree_lh lh)
10812 {
10813 tree result = NULL_TREE;
10814
10815 switch (TREE_CODE (type))
10816 {
10817 case POINTER_TYPE:
10818 case REFERENCE_TYPE:
10819 /* We have to worry about mutually recursive pointers. These can't
10820 be written in C. They can in Ada. It's pathological, but
10821 there's an ACATS test (c38102a) that checks it. Deal with this
10822 by checking if we're pointing to another pointer, that one
10823 points to another pointer, that one does too, and we have no htab.
10824 If so, get a hash table. We check three levels deep to avoid
10825 the cost of the hash table if we don't need one. */
10826 if (POINTER_TYPE_P (TREE_TYPE (type))
10827 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10828 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10829 && !pset)
10830 {
10831 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10832 func, data);
10833 if (result)
10834 return result;
10835
10836 break;
10837 }
10838
10839 /* ... fall through ... */
10840
10841 case COMPLEX_TYPE:
10842 WALK_SUBTREE (TREE_TYPE (type));
10843 break;
10844
10845 case METHOD_TYPE:
10846 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10847
10848 /* Fall through. */
10849
10850 case FUNCTION_TYPE:
10851 WALK_SUBTREE (TREE_TYPE (type));
10852 {
10853 tree arg;
10854
10855 /* We never want to walk into default arguments. */
10856 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10857 WALK_SUBTREE (TREE_VALUE (arg));
10858 }
10859 break;
10860
10861 case ARRAY_TYPE:
10862 /* Don't follow this node's type if it is a pointer, for fear that
10863 we'll have infinite recursion. If we have a PSET, then we
10864 need not fear. */
10865 if (pset
10866 || (!POINTER_TYPE_P (TREE_TYPE (type))
10867 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10868 WALK_SUBTREE (TREE_TYPE (type));
10869 WALK_SUBTREE (TYPE_DOMAIN (type));
10870 break;
10871
10872 case OFFSET_TYPE:
10873 WALK_SUBTREE (TREE_TYPE (type));
10874 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10875 break;
10876
10877 default:
10878 break;
10879 }
10880
10881 return NULL_TREE;
10882 }
10883
10884 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10885 called with the DATA and the address of each sub-tree. If FUNC returns a
10886 non-NULL value, the traversal is stopped, and the value returned by FUNC
10887 is returned. If PSET is non-NULL it is used to record the nodes visited,
10888 and to avoid visiting a node more than once. */
10889
10890 tree
10891 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10892 struct pointer_set_t *pset, walk_tree_lh lh)
10893 {
10894 enum tree_code code;
10895 int walk_subtrees;
10896 tree result;
10897
10898 #define WALK_SUBTREE_TAIL(NODE) \
10899 do \
10900 { \
10901 tp = & (NODE); \
10902 goto tail_recurse; \
10903 } \
10904 while (0)
10905
10906 tail_recurse:
10907 /* Skip empty subtrees. */
10908 if (!*tp)
10909 return NULL_TREE;
10910
10911 /* Don't walk the same tree twice, if the user has requested
10912 that we avoid doing so. */
10913 if (pset && pointer_set_insert (pset, *tp))
10914 return NULL_TREE;
10915
10916 /* Call the function. */
10917 walk_subtrees = 1;
10918 result = (*func) (tp, &walk_subtrees, data);
10919
10920 /* If we found something, return it. */
10921 if (result)
10922 return result;
10923
10924 code = TREE_CODE (*tp);
10925
10926 /* Even if we didn't, FUNC may have decided that there was nothing
10927 interesting below this point in the tree. */
10928 if (!walk_subtrees)
10929 {
10930 /* But we still need to check our siblings. */
10931 if (code == TREE_LIST)
10932 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10933 else if (code == OMP_CLAUSE)
10934 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10935 else
10936 return NULL_TREE;
10937 }
10938
10939 if (lh)
10940 {
10941 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10942 if (result || !walk_subtrees)
10943 return result;
10944 }
10945
10946 switch (code)
10947 {
10948 case ERROR_MARK:
10949 case IDENTIFIER_NODE:
10950 case INTEGER_CST:
10951 case REAL_CST:
10952 case FIXED_CST:
10953 case VECTOR_CST:
10954 case STRING_CST:
10955 case BLOCK:
10956 case PLACEHOLDER_EXPR:
10957 case SSA_NAME:
10958 case FIELD_DECL:
10959 case RESULT_DECL:
10960 /* None of these have subtrees other than those already walked
10961 above. */
10962 break;
10963
10964 case TREE_LIST:
10965 WALK_SUBTREE (TREE_VALUE (*tp));
10966 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10967 break;
10968
10969 case TREE_VEC:
10970 {
10971 int len = TREE_VEC_LENGTH (*tp);
10972
10973 if (len == 0)
10974 break;
10975
10976 /* Walk all elements but the first. */
10977 while (--len)
10978 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10979
10980 /* Now walk the first one as a tail call. */
10981 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10982 }
10983
10984 case COMPLEX_CST:
10985 WALK_SUBTREE (TREE_REALPART (*tp));
10986 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10987
10988 case CONSTRUCTOR:
10989 {
10990 unsigned HOST_WIDE_INT idx;
10991 constructor_elt *ce;
10992
10993 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10994 idx++)
10995 WALK_SUBTREE (ce->value);
10996 }
10997 break;
10998
10999 case SAVE_EXPR:
11000 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11001
11002 case BIND_EXPR:
11003 {
11004 tree decl;
11005 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11006 {
11007 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11008 into declarations that are just mentioned, rather than
11009 declared; they don't really belong to this part of the tree.
11010 And, we can see cycles: the initializer for a declaration
11011 can refer to the declaration itself. */
11012 WALK_SUBTREE (DECL_INITIAL (decl));
11013 WALK_SUBTREE (DECL_SIZE (decl));
11014 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11015 }
11016 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11017 }
11018
11019 case STATEMENT_LIST:
11020 {
11021 tree_stmt_iterator i;
11022 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11023 WALK_SUBTREE (*tsi_stmt_ptr (i));
11024 }
11025 break;
11026
11027 case OMP_CLAUSE:
11028 switch (OMP_CLAUSE_CODE (*tp))
11029 {
11030 case OMP_CLAUSE_PRIVATE:
11031 case OMP_CLAUSE_SHARED:
11032 case OMP_CLAUSE_FIRSTPRIVATE:
11033 case OMP_CLAUSE_COPYIN:
11034 case OMP_CLAUSE_COPYPRIVATE:
11035 case OMP_CLAUSE_FINAL:
11036 case OMP_CLAUSE_IF:
11037 case OMP_CLAUSE_NUM_THREADS:
11038 case OMP_CLAUSE_SCHEDULE:
11039 case OMP_CLAUSE_UNIFORM:
11040 case OMP_CLAUSE_DEPEND:
11041 case OMP_CLAUSE_NUM_TEAMS:
11042 case OMP_CLAUSE_THREAD_LIMIT:
11043 case OMP_CLAUSE_DEVICE:
11044 case OMP_CLAUSE_DIST_SCHEDULE:
11045 case OMP_CLAUSE_SAFELEN:
11046 case OMP_CLAUSE_SIMDLEN:
11047 case OMP_CLAUSE__LOOPTEMP_:
11048 case OMP_CLAUSE__SIMDUID_:
11049 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11050 /* FALLTHRU */
11051
11052 case OMP_CLAUSE_NOWAIT:
11053 case OMP_CLAUSE_ORDERED:
11054 case OMP_CLAUSE_DEFAULT:
11055 case OMP_CLAUSE_UNTIED:
11056 case OMP_CLAUSE_MERGEABLE:
11057 case OMP_CLAUSE_PROC_BIND:
11058 case OMP_CLAUSE_INBRANCH:
11059 case OMP_CLAUSE_NOTINBRANCH:
11060 case OMP_CLAUSE_FOR:
11061 case OMP_CLAUSE_PARALLEL:
11062 case OMP_CLAUSE_SECTIONS:
11063 case OMP_CLAUSE_TASKGROUP:
11064 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11065
11066 case OMP_CLAUSE_LASTPRIVATE:
11067 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11068 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11069 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11070
11071 case OMP_CLAUSE_COLLAPSE:
11072 {
11073 int i;
11074 for (i = 0; i < 3; i++)
11075 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11076 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11077 }
11078
11079 case OMP_CLAUSE_ALIGNED:
11080 case OMP_CLAUSE_LINEAR:
11081 case OMP_CLAUSE_FROM:
11082 case OMP_CLAUSE_TO:
11083 case OMP_CLAUSE_MAP:
11084 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11085 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11086 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11087
11088 case OMP_CLAUSE_REDUCTION:
11089 {
11090 int i;
11091 for (i = 0; i < 4; i++)
11092 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11093 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11094 }
11095
11096 default:
11097 gcc_unreachable ();
11098 }
11099 break;
11100
11101 case TARGET_EXPR:
11102 {
11103 int i, len;
11104
11105 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11106 But, we only want to walk once. */
11107 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11108 for (i = 0; i < len; ++i)
11109 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11110 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11111 }
11112
11113 case DECL_EXPR:
11114 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11115 defining. We only want to walk into these fields of a type in this
11116 case and not in the general case of a mere reference to the type.
11117
11118 The criterion is as follows: if the field can be an expression, it
11119 must be walked only here. This should be in keeping with the fields
11120 that are directly gimplified in gimplify_type_sizes in order for the
11121 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11122 variable-sized types.
11123
11124 Note that DECLs get walked as part of processing the BIND_EXPR. */
11125 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11126 {
11127 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11128 if (TREE_CODE (*type_p) == ERROR_MARK)
11129 return NULL_TREE;
11130
11131 /* Call the function for the type. See if it returns anything or
11132 doesn't want us to continue. If we are to continue, walk both
11133 the normal fields and those for the declaration case. */
11134 result = (*func) (type_p, &walk_subtrees, data);
11135 if (result || !walk_subtrees)
11136 return result;
11137
11138 /* But do not walk a pointed-to type since it may itself need to
11139 be walked in the declaration case if it isn't anonymous. */
11140 if (!POINTER_TYPE_P (*type_p))
11141 {
11142 result = walk_type_fields (*type_p, func, data, pset, lh);
11143 if (result)
11144 return result;
11145 }
11146
11147 /* If this is a record type, also walk the fields. */
11148 if (RECORD_OR_UNION_TYPE_P (*type_p))
11149 {
11150 tree field;
11151
11152 for (field = TYPE_FIELDS (*type_p); field;
11153 field = DECL_CHAIN (field))
11154 {
11155 /* We'd like to look at the type of the field, but we can
11156 easily get infinite recursion. So assume it's pointed
11157 to elsewhere in the tree. Also, ignore things that
11158 aren't fields. */
11159 if (TREE_CODE (field) != FIELD_DECL)
11160 continue;
11161
11162 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11163 WALK_SUBTREE (DECL_SIZE (field));
11164 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11165 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11166 WALK_SUBTREE (DECL_QUALIFIER (field));
11167 }
11168 }
11169
11170 /* Same for scalar types. */
11171 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11172 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11173 || TREE_CODE (*type_p) == INTEGER_TYPE
11174 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11175 || TREE_CODE (*type_p) == REAL_TYPE)
11176 {
11177 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11178 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11179 }
11180
11181 WALK_SUBTREE (TYPE_SIZE (*type_p));
11182 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11183 }
11184 /* FALLTHRU */
11185
11186 default:
11187 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11188 {
11189 int i, len;
11190
11191 /* Walk over all the sub-trees of this operand. */
11192 len = TREE_OPERAND_LENGTH (*tp);
11193
11194 /* Go through the subtrees. We need to do this in forward order so
11195 that the scope of a FOR_EXPR is handled properly. */
11196 if (len)
11197 {
11198 for (i = 0; i < len - 1; ++i)
11199 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11200 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11201 }
11202 }
11203 /* If this is a type, walk the needed fields in the type. */
11204 else if (TYPE_P (*tp))
11205 return walk_type_fields (*tp, func, data, pset, lh);
11206 break;
11207 }
11208
11209 /* We didn't find what we were looking for. */
11210 return NULL_TREE;
11211
11212 #undef WALK_SUBTREE_TAIL
11213 }
11214 #undef WALK_SUBTREE
11215
11216 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11217
11218 tree
11219 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11220 walk_tree_lh lh)
11221 {
11222 tree result;
11223 struct pointer_set_t *pset;
11224
11225 pset = pointer_set_create ();
11226 result = walk_tree_1 (tp, func, data, pset, lh);
11227 pointer_set_destroy (pset);
11228 return result;
11229 }
11230
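#if 0
/* Illustrative sketch only, not part of the original file: a typical
   walk_tree_without_duplicates client.  The callback and wrapper below
   ("count_nodes_r", "count_tree_nodes") are hypothetical names.
   Returning NULL_TREE from the callback continues the walk; a non-NULL
   return value stops it and is propagated back to the caller.  */

static tree
count_nodes_r (tree *tp ATTRIBUTE_UNUSED, int *walk_subtrees ATTRIBUTE_UNUSED,
	       void *data)
{
  int *count = (int *) data;
  (*count)++;
  return NULL_TREE;
}

static int
count_tree_nodes (tree expr)
{
  int count = 0;
  /* Shared subtrees are visited only once thanks to the pointer set.  */
  walk_tree_without_duplicates (&expr, count_nodes_r, &count);
  return count;
}
#endif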
11231
11232 tree
11233 tree_block (tree t)
11234 {
11235 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11236
11237 if (IS_EXPR_CODE_CLASS (c))
11238 return LOCATION_BLOCK (t->exp.locus);
11239 gcc_unreachable ();
11240 return NULL;
11241 }
11242
11243 void
11244 tree_set_block (tree t, tree b)
11245 {
11246 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11247
11248 if (IS_EXPR_CODE_CLASS (c))
11249 {
11250 if (b)
11251 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11252 else
11253 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11254 }
11255 else
11256 gcc_unreachable ();
11257 }
11258
11259 /* Create a nameless artificial label and put it in the current
11260 function context. The label has a location of LOC. Returns the
11261 newly created label. */
11262
11263 tree
11264 create_artificial_label (location_t loc)
11265 {
11266 tree lab = build_decl (loc,
11267 LABEL_DECL, NULL_TREE, void_type_node);
11268
11269 DECL_ARTIFICIAL (lab) = 1;
11270 DECL_IGNORED_P (lab) = 1;
11271 DECL_CONTEXT (lab) = current_function_decl;
11272 return lab;
11273 }
11274
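#if 0
/* Illustrative sketch only, not part of the original file: wrap an
   artificial label in a LABEL_EXPR so that a front end or gimplifier
   helper can append it to a statement list.  "make_label_stmt" is a
   hypothetical name.  */

static tree
make_label_stmt (location_t loc)
{
  tree lab = create_artificial_label (loc);
  return build1 (LABEL_EXPR, void_type_node, lab);
}
#endif
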
11275 /* Given a tree, try to return a useful variable name that we can use
11276 to prefix a temporary that is being assigned the value of the tree.
11277 I.E. given <temp> = &A, return A. */
11278
11279 const char *
11280 get_name (tree t)
11281 {
11282 tree stripped_decl;
11283
11284 stripped_decl = t;
11285 STRIP_NOPS (stripped_decl);
11286 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11287 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11288 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11289 {
11290 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11291 if (!name)
11292 return NULL;
11293 return IDENTIFIER_POINTER (name);
11294 }
11295 else
11296 {
11297 switch (TREE_CODE (stripped_decl))
11298 {
11299 case ADDR_EXPR:
11300 return get_name (TREE_OPERAND (stripped_decl, 0));
11301 default:
11302 return NULL;
11303 }
11304 }
11305 }
11306
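#if 0
/* Illustrative sketch only, not part of the original file: use get_name
   to pick a readable prefix for a temporary that will hold the value of
   VAL, falling back to a generic prefix when no name can be recovered.
   "tmp_prefix_for" is a hypothetical name.  */

static const char *
tmp_prefix_for (tree val)
{
  const char *name = get_name (val);
  return name ? name : "tmp";
}
#endif
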
11307 /* Return true if FNTYPE has a variable argument list. */
11308
11309 bool
11310 stdarg_p (const_tree fntype)
11311 {
11312 function_args_iterator args_iter;
11313 tree n = NULL_TREE, t;
11314
11315 if (!fntype)
11316 return false;
11317
11318 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11319 {
11320 n = t;
11321 }
11322
11323 return n != NULL_TREE && n != void_type_node;
11324 }
11325
11326 /* Return true if FNTYPE has a prototype. */
11327
11328 bool
11329 prototype_p (tree fntype)
11330 {
11331 tree t;
11332
11333 gcc_assert (fntype != NULL_TREE);
11334
11335 t = TYPE_ARG_TYPES (fntype);
11336 return (t != NULL_TREE);
11337 }
11338
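#if 0
/* Illustrative sketch only, not part of the original file: combine
   prototype_p and stdarg_p to recognize a fixed-arity prototype, i.e. a
   FUNCTION_TYPE whose TYPE_ARG_TYPES list is present and terminated by
   void_type_node.  "fixed_arity_prototype_p" is a hypothetical name.  */

static bool
fixed_arity_prototype_p (tree fntype)
{
  return prototype_p (fntype) && !stdarg_p (fntype);
}
#endif
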
11339 /* If BLOCK is inlined from an __attribute__((__artificial__))
11340 routine, return a pointer to the location from which it has been
11341 called. */
11342 location_t *
11343 block_nonartificial_location (tree block)
11344 {
11345 location_t *ret = NULL;
11346
11347 while (block && TREE_CODE (block) == BLOCK
11348 && BLOCK_ABSTRACT_ORIGIN (block))
11349 {
11350 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11351
11352 while (TREE_CODE (ao) == BLOCK
11353 && BLOCK_ABSTRACT_ORIGIN (ao)
11354 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11355 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11356
11357 if (TREE_CODE (ao) == FUNCTION_DECL)
11358 {
11359 /* If AO is an artificial inline, point RET to the
11360 call site locus at which it has been inlined and continue
11361 the loop, in case AO's caller is also an artificial
11362 inline. */
11363 if (DECL_DECLARED_INLINE_P (ao)
11364 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11365 ret = &BLOCK_SOURCE_LOCATION (block);
11366 else
11367 break;
11368 }
11369 else if (TREE_CODE (ao) != BLOCK)
11370 break;
11371
11372 block = BLOCK_SUPERCONTEXT (block);
11373 }
11374 return ret;
11375 }
11376
11377
11378 /* If EXP is inlined from an __attribute__((__artificial__))
11379 function, return the location of the original call expression. */
11380
11381 location_t
11382 tree_nonartificial_location (tree exp)
11383 {
11384 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11385
11386 if (loc)
11387 return *loc;
11388 else
11389 return EXPR_LOCATION (exp);
11390 }
11391
11392
11393 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11394 and TARGET_OPTION_NODE nodes. */
11395
11396 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11397
11398 static hashval_t
11399 cl_option_hash_hash (const void *x)
11400 {
11401 const_tree const t = (const_tree) x;
11402 const char *p;
11403 size_t i;
11404 size_t len = 0;
11405 hashval_t hash = 0;
11406
11407 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11408 {
11409 p = (const char *)TREE_OPTIMIZATION (t);
11410 len = sizeof (struct cl_optimization);
11411 }
11412
11413 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11414 {
11415 p = (const char *)TREE_TARGET_OPTION (t);
11416 len = sizeof (struct cl_target_option);
11417 }
11418
11419 else
11420 gcc_unreachable ();
11421
11422 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11423 something else. */
11424 for (i = 0; i < len; i++)
11425 if (p[i])
11426 hash = (hash << 4) ^ ((i << 2) | p[i]);
11427
11428 return hash;
11429 }
11430
11431 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11432 TARGET_OPTION_NODE tree node) is the same as that given by *Y, which is
11433 the same kind of node. */
11434
11435 static int
11436 cl_option_hash_eq (const void *x, const void *y)
11437 {
11438 const_tree const xt = (const_tree) x;
11439 const_tree const yt = (const_tree) y;
11440 const char *xp;
11441 const char *yp;
11442 size_t len;
11443
11444 if (TREE_CODE (xt) != TREE_CODE (yt))
11445 return 0;
11446
11447 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11448 {
11449 xp = (const char *)TREE_OPTIMIZATION (xt);
11450 yp = (const char *)TREE_OPTIMIZATION (yt);
11451 len = sizeof (struct cl_optimization);
11452 }
11453
11454 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11455 {
11456 xp = (const char *)TREE_TARGET_OPTION (xt);
11457 yp = (const char *)TREE_TARGET_OPTION (yt);
11458 len = sizeof (struct cl_target_option);
11459 }
11460
11461 else
11462 gcc_unreachable ();
11463
11464 return (memcmp (xp, yp, len) == 0);
11465 }
11466
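#if 0
/* Illustrative sketch only, not part of the original file: this is
   roughly how the two callbacks above plug into a libiberty hash table;
   the table consulted by build_optimization_node below is set up
   elsewhere in this file in essentially the same way.
   "example_option_table" and "create_example_option_table" are
   hypothetical names.  */

static htab_t example_option_table;

static void
create_example_option_table (void)
{
  example_option_table = htab_create_ggc (64, cl_option_hash_hash,
					  cl_option_hash_eq, NULL);
}
#endif
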
11467 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11468
11469 tree
11470 build_optimization_node (struct gcc_options *opts)
11471 {
11472 tree t;
11473 void **slot;
11474
11475 /* Use the cache of optimization nodes. */
11476
11477 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11478 opts);
11479
11480 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11481 t = (tree) *slot;
11482 if (!t)
11483 {
11484 /* Insert this one into the hash table. */
11485 t = cl_optimization_node;
11486 *slot = t;
11487
11488 /* Make a new node for next time round. */
11489 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11490 }
11491
11492 return t;
11493 }
11494
11495 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11496
11497 tree
11498 build_target_option_node (struct gcc_options *opts)
11499 {
11500 tree t;
11501 void **slot;
11502
11503 /* Use the cache of target option nodes. */
11504
11505 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11506 opts);
11507
11508 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11509 t = (tree) *slot;
11510 if (!t)
11511 {
11512 /* Insert this one into the hash table. */
11513 t = cl_target_option_node;
11514 *slot = t;
11515
11516 /* Make a new node for next time round. */
11517 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11518 }
11519
11520 return t;
11521 }
11522
11523 /* Determine the "ultimate origin" of a block. The block may be an inlined
11524 instance of an inlined instance of a block which is local to an inline
11525 function, so we have to trace all of the way back through the origin chain
11526 to find out what sort of node actually served as the original seed for the
11527 given block. */
11528
11529 tree
11530 block_ultimate_origin (const_tree block)
11531 {
11532 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11533
11534 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11535 nodes in the function to point to themselves; ignore that if
11536 we're trying to output the abstract instance of this function. */
11537 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11538 return NULL_TREE;
11539
11540 if (immediate_origin == NULL_TREE)
11541 return NULL_TREE;
11542 else
11543 {
11544 tree ret_val;
11545 tree lookahead = immediate_origin;
11546
11547 do
11548 {
11549 ret_val = lookahead;
11550 lookahead = (TREE_CODE (ret_val) == BLOCK
11551 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11552 }
11553 while (lookahead != NULL && lookahead != ret_val);
11554
11555 /* The block's abstract origin chain may not be the *ultimate* origin of
11556 the block. It could lead to a DECL that has an abstract origin set.
11557 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11558 will give us if it has one). Note that DECL's abstract origins are
11559 supposed to be the most distant ancestor (or so decl_ultimate_origin
11560 claims), so we don't need to loop following the DECL origins. */
11561 if (DECL_P (ret_val))
11562 return DECL_ORIGIN (ret_val);
11563
11564 return ret_val;
11565 }
11566 }
11567
11568 /* Return true if T1 and T2 are equivalent lists. */
11569
11570 bool
11571 list_equal_p (const_tree t1, const_tree t2)
11572 {
11573 for (; t1 && t2; t1 = TREE_CHAIN (t1) , t2 = TREE_CHAIN (t2))
11574 if (TREE_VALUE (t1) != TREE_VALUE (t2))
11575 return false;
11576 return !t1 && !t2;
11577 }
11578
11579 /* Return true iff conversion in EXP generates no instruction. Mark
11580 it inline so that we fully inline into the stripping functions even
11581 though we have two uses of this function. */
11582
11583 static inline bool
11584 tree_nop_conversion (const_tree exp)
11585 {
11586 tree outer_type, inner_type;
11587
11588 if (!CONVERT_EXPR_P (exp)
11589 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11590 return false;
11591 if (TREE_OPERAND (exp, 0) == error_mark_node)
11592 return false;
11593
11594 outer_type = TREE_TYPE (exp);
11595 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11596
11597 if (!inner_type)
11598 return false;
11599
11600 /* Use precision rather than machine mode when we can, which gives
11601 the correct answer even for submode (bit-field) types. */
11602 if ((INTEGRAL_TYPE_P (outer_type)
11603 || POINTER_TYPE_P (outer_type)
11604 || TREE_CODE (outer_type) == OFFSET_TYPE)
11605 && (INTEGRAL_TYPE_P (inner_type)
11606 || POINTER_TYPE_P (inner_type)
11607 || TREE_CODE (inner_type) == OFFSET_TYPE))
11608 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11609
11610 /* Otherwise fall back on comparing machine modes (e.g. for
11611 aggregate types, floats). */
11612 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11613 }
11614
11615 /* Return true iff conversion in EXP generates no instruction. Don't
11616 consider conversions changing the signedness. */
11617
11618 static bool
11619 tree_sign_nop_conversion (const_tree exp)
11620 {
11621 tree outer_type, inner_type;
11622
11623 if (!tree_nop_conversion (exp))
11624 return false;
11625
11626 outer_type = TREE_TYPE (exp);
11627 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11628
11629 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11630 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11631 }
11632
11633 /* Strip conversions from EXP according to tree_nop_conversion and
11634 return the resulting expression. */
11635
11636 tree
11637 tree_strip_nop_conversions (tree exp)
11638 {
11639 while (tree_nop_conversion (exp))
11640 exp = TREE_OPERAND (exp, 0);
11641 return exp;
11642 }
11643
11644 /* Strip conversions from EXP according to tree_sign_nop_conversion
11645 and return the resulting expression. */
11646
11647 tree
11648 tree_strip_sign_nop_conversions (tree exp)
11649 {
11650 while (tree_sign_nop_conversion (exp))
11651 exp = TREE_OPERAND (exp, 0);
11652 return exp;
11653 }
11654
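#if 0
/* Illustrative sketch only, not part of the original file: the two
   strippers above differ exactly when a no-op conversion changes the
   signedness (or pointer-ness), e.g. (unsigned int) of a signed int.
   "sign_changing_nop_p" is a hypothetical name.  */

static bool
sign_changing_nop_p (tree exp)
{
  return tree_strip_nop_conversions (exp)
	 != tree_strip_sign_nop_conversions (exp);
}
#endif
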
11655 /* Avoid any floating point extensions from EXP. */
11656 tree
11657 strip_float_extensions (tree exp)
11658 {
11659 tree sub, expt, subt;
11660
11661 /* For a floating point constant, look up the narrowest type that can hold
11662 it properly and handle it like (type)(narrowest_type)constant.
11663 This way we can optimize, for instance, a=a*2.0 where "a" is float
11664 but 2.0 is a double constant. */
11665 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11666 {
11667 REAL_VALUE_TYPE orig;
11668 tree type = NULL;
11669
11670 orig = TREE_REAL_CST (exp);
11671 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11672 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11673 type = float_type_node;
11674 else if (TYPE_PRECISION (TREE_TYPE (exp))
11675 > TYPE_PRECISION (double_type_node)
11676 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11677 type = double_type_node;
11678 if (type)
11679 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11680 }
11681
11682 if (!CONVERT_EXPR_P (exp))
11683 return exp;
11684
11685 sub = TREE_OPERAND (exp, 0);
11686 subt = TREE_TYPE (sub);
11687 expt = TREE_TYPE (exp);
11688
11689 if (!FLOAT_TYPE_P (subt))
11690 return exp;
11691
11692 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11693 return exp;
11694
11695 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11696 return exp;
11697
11698 return strip_float_extensions (sub);
11699 }
11700
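#if 0
/* Illustrative sketch only, not part of the original file: in a=a*2.0
   with "a" a float, both multiply operands reduce to float precision
   once the implicit extensions (and the double constant) are stripped,
   so the arithmetic could be done in float.  "operands_fit_in_float_p"
   is a hypothetical name.  */

static bool
operands_fit_in_float_p (tree op0, tree op1)
{
  tree t0 = strip_float_extensions (op0);
  tree t1 = strip_float_extensions (op1);
  return (TYPE_PRECISION (TREE_TYPE (t0)) <= TYPE_PRECISION (float_type_node)
	  && TYPE_PRECISION (TREE_TYPE (t1))
	     <= TYPE_PRECISION (float_type_node));
}
#endif
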
11701 /* Strip out all handled components that produce invariant
11702 offsets. */
11703
11704 const_tree
11705 strip_invariant_refs (const_tree op)
11706 {
11707 while (handled_component_p (op))
11708 {
11709 switch (TREE_CODE (op))
11710 {
11711 case ARRAY_REF:
11712 case ARRAY_RANGE_REF:
11713 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11714 || TREE_OPERAND (op, 2) != NULL_TREE
11715 || TREE_OPERAND (op, 3) != NULL_TREE)
11716 return NULL;
11717 break;
11718
11719 case COMPONENT_REF:
11720 if (TREE_OPERAND (op, 2) != NULL_TREE)
11721 return NULL;
11722 break;
11723
11724 default:;
11725 }
11726 op = TREE_OPERAND (op, 0);
11727 }
11728
11729 return op;
11730 }
11731
11732 static GTY(()) tree gcc_eh_personality_decl;
11733
11734 /* Return the GCC personality function decl. */
11735
11736 tree
11737 lhd_gcc_personality (void)
11738 {
11739 if (!gcc_eh_personality_decl)
11740 gcc_eh_personality_decl = build_personality_function ("gcc");
11741 return gcc_eh_personality_decl;
11742 }
11743
11744 /* For languages with One Definition Rule, work out if
11745 trees are actually the same even if the tree representation
11746 differs. This handles only decls appearing in TYPE_NAME
11747 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11748 RECORD_TYPE and IDENTIFIER_NODE. */
11749
11750 static bool
11751 same_for_odr (tree t1, tree t2)
11752 {
11753 if (t1 == t2)
11754 return true;
11755 if (!t1 || !t2)
11756 return false;
11757 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11758 if (TREE_CODE (t1) == IDENTIFIER_NODE
11759 && TREE_CODE (t2) == TYPE_DECL
11760 && DECL_FILE_SCOPE_P (t1))
11761 {
11762 t2 = DECL_NAME (t2);
11763 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11764 }
11765 if (TREE_CODE (t2) == IDENTIFIER_NODE
11766 && TREE_CODE (t1) == TYPE_DECL
11767 && DECL_FILE_SCOPE_P (t2))
11768 {
11769 t1 = DECL_NAME (t1);
11770 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11771 }
11772 if (TREE_CODE (t1) != TREE_CODE (t2))
11773 return false;
11774 if (TYPE_P (t1))
11775 return types_same_for_odr (t1, t2);
11776 if (DECL_P (t1))
11777 return decls_same_for_odr (t1, t2);
11778 return false;
11779 }
11780
11781 /* For languages with One Definition Rule, work out if
11782 decls are actually the same even if the tree representation
11783 differs. This handles only decls appearing in TYPE_NAME
11784 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11785 RECORD_TYPE and IDENTIFIER_NODE. */
11786
11787 static bool
11788 decls_same_for_odr (tree decl1, tree decl2)
11789 {
11790 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11791 && DECL_ORIGINAL_TYPE (decl1))
11792 decl1 = DECL_ORIGINAL_TYPE (decl1);
11793 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11794 && DECL_ORIGINAL_TYPE (decl2))
11795 decl2 = DECL_ORIGINAL_TYPE (decl2);
11796 if (decl1 == decl2)
11797 return true;
11798 if (!decl1 || !decl2)
11799 return false;
11800 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11801 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11802 return false;
11803 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11804 return true;
11805 if (TREE_CODE (decl1) != NAMESPACE_DECL
11806 && TREE_CODE (decl1) != TYPE_DECL)
11807 return false;
11808 if (!DECL_NAME (decl1))
11809 return false;
11810 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11811 gcc_checking_assert (!DECL_NAME (decl2)
11812 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11813 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11814 return false;
11815 return same_for_odr (DECL_CONTEXT (decl1),
11816 DECL_CONTEXT (decl2));
11817 }
11818
11819 /* For languages with One Definition Rule, work out if
11820 types are the same even if the tree representation differs.
11821 This is non-trivial for LTO, where minor differences in
11822 the type representation may have prevented type merging
11823 from merging two copies of an otherwise equivalent type. */
11824
11825 bool
11826 types_same_for_odr (tree type1, tree type2)
11827 {
11828 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11829 type1 = TYPE_MAIN_VARIANT (type1);
11830 type2 = TYPE_MAIN_VARIANT (type2);
11831 if (type1 == type2)
11832 return true;
11833
11834 #ifndef ENABLE_CHECKING
11835 if (!in_lto_p)
11836 return false;
11837 #endif
11838
11839 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11840 on the corresponding TYPE_STUB_DECL. */
11841 if (type_in_anonymous_namespace_p (type1)
11842 || type_in_anonymous_namespace_p (type2))
11843 return false;
11844 /* When the assembler name of the virtual table is available, it is
11845 easy to compare types for equivalence. */
11846 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11847 && BINFO_VTABLE (TYPE_BINFO (type1))
11848 && BINFO_VTABLE (TYPE_BINFO (type2)))
11849 {
11850 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11851 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11852
11853 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11854 {
11855 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11856 || !operand_equal_p (TREE_OPERAND (v1, 1),
11857 TREE_OPERAND (v2, 1), 0))
11858 return false;
11859 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11860 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11861 }
11862 v1 = DECL_ASSEMBLER_NAME (v1);
11863 v2 = DECL_ASSEMBLER_NAME (v2);
11864 return (v1 == v2);
11865 }
11866
11867 /* FIXME: the code comparing type names considers all instantiations of the
11868 same template to have the same name. This is because we have no access
11869 to template parameters. For types with no virtual method tables
11870 we can thus return false positives. At the moment we do not need
11871 to compare types in scenarios other than devirtualization. */
11872
11873 /* If the types are not structurally the same, do not bother to continue.
11874 A match in the remainder of the code would mean an ODR violation. */
11875 if (!types_compatible_p (type1, type2))
11876 return false;
11877 if (!TYPE_NAME (type1))
11878 return false;
11879 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
11880 return false;
11881 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
11882 return false;
11883 /* When not in LTO the MAIN_VARIANT check should be the same. */
11884 gcc_assert (in_lto_p);
11885
11886 return true;
11887 }
11888
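#if 0
/* Illustrative sketch only, not part of the original file: a typical
   devirtualization-style consumer compares the class a virtual call was
   made through (see obj_type_ref_class below) against a candidate type.
   "call_matches_class_p" is a hypothetical name.  */

static bool
call_matches_class_p (tree obj_type_ref, tree candidate_type)
{
  return types_same_for_odr (obj_type_ref_class (obj_type_ref),
			     candidate_type);
}
#endif
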
11889 /* TARGET is the call target of a GIMPLE call statement
11890 (obtained by gimple_call_fn). Return true if it is
11891 an OBJ_TYPE_REF representing a virtual call of a C++ method.
11892 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11893 through a cast, where the middle-end devirtualization machinery
11894 can't apply.) */
11895
11896 bool
11897 virtual_method_call_p (tree target)
11898 {
11899 if (TREE_CODE (target) != OBJ_TYPE_REF)
11900 return false;
11901 target = TREE_TYPE (target);
11902 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11903 target = TREE_TYPE (target);
11904 if (TREE_CODE (target) == FUNCTION_TYPE)
11905 return false;
11906 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11907 return true;
11908 }
11909
11910 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11911
11912 tree
11913 obj_type_ref_class (tree ref)
11914 {
11915 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11916 ref = TREE_TYPE (ref);
11917 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11918 ref = TREE_TYPE (ref);
11919 /* We look for the type that THIS points to. ObjC also builds
11920 OBJ_TYPE_REF with non-method calls; their first parameter
11921 ID, however, also corresponds to the class type. */
11922 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11923 || TREE_CODE (ref) == FUNCTION_TYPE);
11924 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11925 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11926 return TREE_TYPE (ref);
11927 }
11928
11929 /* Return true if T is in an anonymous namespace. */
11930
11931 bool
11932 type_in_anonymous_namespace_p (tree t)
11933 {
11934 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11935 }
11936
11937 /* Try to find a base info of BINFO that would have its field decl at offset
11938 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11939 found, return it; otherwise return NULL_TREE. */
11940
11941 tree
11942 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11943 {
11944 tree type = BINFO_TYPE (binfo);
11945
11946 while (true)
11947 {
11948 HOST_WIDE_INT pos, size;
11949 tree fld;
11950 int i;
11951
11952 if (types_same_for_odr (type, expected_type))
11953 return binfo;
11954 if (offset < 0)
11955 return NULL_TREE;
11956
11957 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11958 {
11959 if (TREE_CODE (fld) != FIELD_DECL)
11960 continue;
11961
11962 pos = int_bit_position (fld);
11963 size = tree_to_uhwi (DECL_SIZE (fld));
11964 if (pos <= offset && (pos + size) > offset)
11965 break;
11966 }
11967 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11968 return NULL_TREE;
11969
11970 if (!DECL_ARTIFICIAL (fld))
11971 {
11972 binfo = TYPE_BINFO (TREE_TYPE (fld));
11973 if (!binfo)
11974 return NULL_TREE;
11975 }
11976 /* Offset 0 indicates the primary base, whose vtable contents are
11977 represented in the binfo for the derived class. */
11978 else if (offset != 0)
11979 {
11980 tree base_binfo, found_binfo = NULL_TREE;
11981 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11982 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11983 {
11984 found_binfo = base_binfo;
11985 break;
11986 }
11987 if (!found_binfo)
11988 return NULL_TREE;
11989 binfo = found_binfo;
11990 }
11991
11992 type = TREE_TYPE (fld);
11993 offset -= pos;
11994 }
11995 }
11996
11997 /* Returns true if X is a typedef decl. */
11998
11999 bool
12000 is_typedef_decl (tree x)
12001 {
12002 return (x && TREE_CODE (x) == TYPE_DECL
12003 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12004 }
12005
12006 /* Returns true iff TYPE is a type variant created for a typedef. */
12007
12008 bool
12009 typedef_variant_p (tree type)
12010 {
12011 return is_typedef_decl (TYPE_NAME (type));
12012 }
12013
12014 /* Warn about a use of an identifier which was marked deprecated. */
12015 void
12016 warn_deprecated_use (tree node, tree attr)
12017 {
12018 const char *msg;
12019
12020 if (node == 0 || !warn_deprecated_decl)
12021 return;
12022
12023 if (!attr)
12024 {
12025 if (DECL_P (node))
12026 attr = DECL_ATTRIBUTES (node);
12027 else if (TYPE_P (node))
12028 {
12029 tree decl = TYPE_STUB_DECL (node);
12030 if (decl)
12031 attr = lookup_attribute ("deprecated",
12032 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12033 }
12034 }
12035
12036 if (attr)
12037 attr = lookup_attribute ("deprecated", attr);
12038
12039 if (attr)
12040 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12041 else
12042 msg = NULL;
12043
12044 if (DECL_P (node))
12045 {
12046 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12047 if (msg)
12048 warning (OPT_Wdeprecated_declarations,
12049 "%qD is deprecated (declared at %r%s:%d%R): %s",
12050 node, "locus", xloc.file, xloc.line, msg);
12051 else
12052 warning (OPT_Wdeprecated_declarations,
12053 "%qD is deprecated (declared at %r%s:%d%R)",
12054 node, "locus", xloc.file, xloc.line);
12055 }
12056 else if (TYPE_P (node))
12057 {
12058 tree what = NULL_TREE;
12059 tree decl = TYPE_STUB_DECL (node);
12060
12061 if (TYPE_NAME (node))
12062 {
12063 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12064 what = TYPE_NAME (node);
12065 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12066 && DECL_NAME (TYPE_NAME (node)))
12067 what = DECL_NAME (TYPE_NAME (node));
12068 }
12069
12070 if (decl)
12071 {
12072 expanded_location xloc
12073 = expand_location (DECL_SOURCE_LOCATION (decl));
12074 if (what)
12075 {
12076 if (msg)
12077 warning (OPT_Wdeprecated_declarations,
12078 "%qE is deprecated (declared at %r%s:%d%R): %s",
12079 what, "locus", xloc.file, xloc.line, msg);
12080 else
12081 warning (OPT_Wdeprecated_declarations,
12082 "%qE is deprecated (declared at %r%s:%d%R)",
12083 what, "locus", xloc.file, xloc.line);
12084 }
12085 else
12086 {
12087 if (msg)
12088 warning (OPT_Wdeprecated_declarations,
12089 "type is deprecated (declared at %r%s:%d%R): %s",
12090 "locus", xloc.file, xloc.line, msg);
12091 else
12092 warning (OPT_Wdeprecated_declarations,
12093 "type is deprecated (declared at %r%s:%d%R)",
12094 "locus", xloc.file, xloc.line);
12095 }
12096 }
12097 else
12098 {
12099 if (what)
12100 {
12101 if (msg)
12102 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12103 what, msg);
12104 else
12105 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12106 }
12107 else
12108 {
12109 if (msg)
12110 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12111 msg);
12112 else
12113 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12114 }
12115 }
12116 }
12117 }
12118
12119 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12120 somewhere in it. */
12121
12122 bool
12123 contains_bitfld_component_ref_p (const_tree ref)
12124 {
12125 while (handled_component_p (ref))
12126 {
12127 if (TREE_CODE (ref) == COMPONENT_REF
12128 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12129 return true;
12130 ref = TREE_OPERAND (ref, 0);
12131 }
12132
12133 return false;
12134 }
12135
12136 /* Try to determine whether a TRY_CATCH expression can fall through.
12137 This is a subroutine of block_may_fallthru. */
12138
12139 static bool
12140 try_catch_may_fallthru (const_tree stmt)
12141 {
12142 tree_stmt_iterator i;
12143
12144 /* If the TRY block can fall through, the whole TRY_CATCH can
12145 fall through. */
12146 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12147 return true;
12148
12149 i = tsi_start (TREE_OPERAND (stmt, 1));
12150 switch (TREE_CODE (tsi_stmt (i)))
12151 {
12152 case CATCH_EXPR:
12153 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12154 catch expression and a body. The whole TRY_CATCH may fall
12155 through iff any of the catch bodies falls through. */
12156 for (; !tsi_end_p (i); tsi_next (&i))
12157 {
12158 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12159 return true;
12160 }
12161 return false;
12162
12163 case EH_FILTER_EXPR:
12164 /* The exception filter expression only matters if there is an
12165 exception. If the exception does not match EH_FILTER_TYPES,
12166 we will execute EH_FILTER_FAILURE, and we will fall through
12167 if that falls through. If the exception does match
12168 EH_FILTER_TYPES, the stack unwinder will continue up the
12169 stack, so we will not fall through. We don't know whether we
12170 will throw an exception which matches EH_FILTER_TYPES or not,
12171 so we just ignore EH_FILTER_TYPES and assume that we might
12172 throw an exception which doesn't match. */
12173 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12174
12175 default:
12176 /* This case represents statements to be executed when an
12177 exception occurs. Those statements are implicitly followed
12178 by a RESX statement to resume execution after the exception.
12179 So in this case the TRY_CATCH never falls through. */
12180 return false;
12181 }
12182 }
12183
12184 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12185 need not be 100% accurate; simply be conservative and return true if we
12186 don't know. This is used only to avoid stupidly generating extra code.
12187 If we're wrong, we'll just delete the extra code later. */
12188
12189 bool
12190 block_may_fallthru (const_tree block)
12191 {
12192 /* This CONST_CAST is okay because expr_last returns its argument
12193 unmodified and we assign it to a const_tree. */
12194 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12195
12196 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12197 {
12198 case GOTO_EXPR:
12199 case RETURN_EXPR:
12200 /* Easy cases. If the last statement of the block implies
12201 control transfer, then we can't fall through. */
12202 return false;
12203
12204 case SWITCH_EXPR:
12205 /* If SWITCH_LABELS is set, this is lowered, and represents a
12206 branch to a selected label and hence cannot fall through.
12207 Otherwise SWITCH_BODY is set, and the switch can fall
12208 through. */
12209 return SWITCH_LABELS (stmt) == NULL_TREE;
12210
12211 case COND_EXPR:
12212 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12213 return true;
12214 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12215
12216 case BIND_EXPR:
12217 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12218
12219 case TRY_CATCH_EXPR:
12220 return try_catch_may_fallthru (stmt);
12221
12222 case TRY_FINALLY_EXPR:
12223 /* The finally clause is always executed after the try clause,
12224 so if it does not fall through, then the try-finally will not
12225 fall through. Otherwise, if the try clause does not fall
12226 through, then when the finally clause falls through it will
12227 resume execution wherever the try clause was going. So the
12228 whole try-finally will only fall through if both the try
12229 clause and the finally clause fall through. */
12230 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12231 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12232
12233 case MODIFY_EXPR:
12234 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12235 stmt = TREE_OPERAND (stmt, 1);
12236 else
12237 return true;
12238 /* FALLTHRU */
12239
12240 case CALL_EXPR:
12241 /* Functions that do not return do not fall through. */
12242 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12243
12244 case CLEANUP_POINT_EXPR:
12245 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12246
12247 case TARGET_EXPR:
12248 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12249
12250 case ERROR_MARK:
12251 return true;
12252
12253 default:
12254 return lang_hooks.block_may_fallthru (stmt);
12255 }
12256 }
12257
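#if 0
/* Illustrative sketch only, not part of the original file: a front end
   can ask whether a function body may fall off its end, and hence
   whether an implicit return needs to be synthesized.
   "needs_implicit_return_p" is a hypothetical name.  */

static bool
needs_implicit_return_p (tree fndecl)
{
  tree body = DECL_SAVED_TREE (fndecl);
  return body && block_may_fallthru (body);
}
#endif
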
12258 /* True if we are using EH to handle cleanups. */
12259 static bool using_eh_for_cleanups_flag = false;
12260
12261 /* This routine is called from front ends to indicate eh should be used for
12262 cleanups. */
12263 void
12264 using_eh_for_cleanups (void)
12265 {
12266 using_eh_for_cleanups_flag = true;
12267 }
12268
12269 /* Query whether EH is used for cleanups. */
12270 bool
12271 using_eh_for_cleanups_p (void)
12272 {
12273 return using_eh_for_cleanups_flag;
12274 }
12275
12276 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12277 const char *
12278 get_tree_code_name (enum tree_code code)
12279 {
12280 const char *invalid = "<invalid tree code>";
12281
12282 if (code >= MAX_TREE_CODES)
12283 return invalid;
12284
12285 return tree_code_name[code];
12286 }
12287
12288 #include "gt-tree.h"