1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "tm_p.h"
37 #include "function.h"
38 #include "obstack.h"
39 #include "toplev.h" /* get_random_seed */
40 #include "ggc.h"
41 #include "hashtab.h"
42 #include "filenames.h"
43 #include "output.h"
44 #include "target.h"
45 #include "common/common-target.h"
46 #include "langhooks.h"
47 #include "tree-inline.h"
48 #include "tree-iterator.h"
49 #include "basic-block.h"
50 #include "bitmap.h"
51 #include "gimple.h"
52 #include "gimple-ssa.h"
53 #include "cgraph.h"
54 #include "tree-phinodes.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "pointer-set.h"
59 #include "tree-pass.h"
60 #include "langhooks-def.h"
61 #include "diagnostic.h"
62 #include "tree-diagnostic.h"
63 #include "tree-pretty-print.h"
64 #include "except.h"
65 #include "debug.h"
66 #include "intl.h"
67 #include "wide-int.h"
68
69 /* Tree code classes. */
70
71 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
72 #define END_OF_BASE_TREE_CODES tcc_exceptional,
73
74 const enum tree_code_class tree_code_type[] = {
75 #include "all-tree.def"
76 };
77
78 #undef DEFTREECODE
79 #undef END_OF_BASE_TREE_CODES
80
81 /* Table indexed by tree code giving number of expression
82 operands beyond the fixed part of the node structure.
83 Not used for types or decls. */
84
85 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
86 #define END_OF_BASE_TREE_CODES 0,
87
88 const unsigned char tree_code_length[] = {
89 #include "all-tree.def"
90 };
91
92 #undef DEFTREECODE
93 #undef END_OF_BASE_TREE_CODES
94
95 /* Names of tree components.
96 Used for printing out the tree and error messages. */
97 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
98 #define END_OF_BASE_TREE_CODES "@dummy",
99
100 static const char *const tree_code_name[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
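/* As an illustrative sketch of how the three tables above are filled in:
   a single DEFTREECODE entry in tree.def, such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands, via the three inclusions of all-tree.def above, into one entry
   in each table:

     tree_code_type[PLUS_EXPR]   == tcc_binary
     tree_code_length[PLUS_EXPR] == 2
     tree_code_name[PLUS_EXPR]   == "plus_expr"  */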
106
107 /* Each tree code class has an associated string representation.
108 These must correspond to the tree_code_class entries. */
109
110 const char *const tree_code_class_strings[] =
111 {
112 "exceptional",
113 "constant",
114 "type",
115 "declaration",
116 "reference",
117 "comparison",
118 "unary",
119 "binary",
120 "statement",
121 "vl_exp",
122 "expression"
123 };
124
125 /* obstack.[ch] explicitly declined to prototype this. */
126 extern int _obstack_allocated_p (struct obstack *h, void *obj);
127
128 /* Statistics-gathering stuff. */
129
130 static int tree_code_counts[MAX_TREE_CODES];
131 int tree_node_counts[(int) all_kinds];
132 int tree_node_sizes[(int) all_kinds];
133
134 /* Keep in sync with tree.h:enum tree_node_kind. */
135 static const char * const tree_node_kind_names[] = {
136 "decls",
137 "types",
138 "blocks",
139 "stmts",
140 "refs",
141 "exprs",
142 "constants",
143 "identifiers",
144 "vecs",
145 "binfos",
146 "ssa names",
147 "constructors",
148 "random kinds",
149 "lang_decl kinds",
150 "lang_type kinds",
151 "omp clauses",
152 };
153
154 /* Unique id for next decl created. */
155 static GTY(()) int next_decl_uid;
156 /* Unique id for next type created. */
157 static GTY(()) int next_type_uid = 1;
158 /* Unique id for next debug decl created. Use negative numbers
159 to catch erroneous uses. */
160 static GTY(()) int next_debug_decl_uid;
161
162 /* Since we cannot rehash a type after it is in the table, we have to
163 keep the hash code. */
164
165 struct GTY(()) type_hash {
166 unsigned long hash;
167 tree type;
168 };
169
170 /* Initial size of the hash table (rounded to next prime). */
171 #define TYPE_HASH_INITIAL_SIZE 1000
172
173 /* Now here is the hash table. When recording a type, it is added to
174 the slot whose index is the hash code. Note that the hash table is
175 used for several kinds of types (function types, array types and
176 array index range types, for now). While all these live in the
177 same table, they are completely independent, and the hash code is
178 computed differently for each of these. */
179
180 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
181 htab_t type_hash_table;
182
183 /* Hash table and temporary node for larger integer const values. */
184 static GTY (()) tree int_cst_node;
185 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
186 htab_t int_cst_hash_table;
187
188 /* Hash table for optimization flags and target option flags. Use the same
189 hash table for both sets of options. Nodes for building the current
190 optimization and target option nodes. The assumption is most of the time
191 the options created will already be in the hash table, so we avoid
192 allocating and freeing up a node repeatedly. */
193 static GTY (()) tree cl_optimization_node;
194 static GTY (()) tree cl_target_option_node;
195 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
196 htab_t cl_option_hash_table;
197
198 /* General tree->tree mapping structure for use in hash tables. */
199
200
201 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
202 htab_t debug_expr_for_decl;
203
204 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
205 htab_t value_expr_for_decl;
206
207 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
208 htab_t debug_args_for_decl;
209
210 static GTY ((if_marked ("tree_priority_map_marked_p"),
211 param_is (struct tree_priority_map)))
212 htab_t init_priority_for_decl;
213
214 static void set_type_quals (tree, int);
215 static int type_hash_eq (const void *, const void *);
216 static hashval_t type_hash_hash (const void *);
217 static hashval_t int_cst_hash_hash (const void *);
218 static int int_cst_hash_eq (const void *, const void *);
219 static hashval_t cl_option_hash_hash (const void *);
220 static int cl_option_hash_eq (const void *, const void *);
221 static void print_type_hash_statistics (void);
222 static void print_debug_expr_statistics (void);
223 static void print_value_expr_statistics (void);
224 static int type_hash_marked_p (const void *);
225 static unsigned int type_hash_list (const_tree, hashval_t);
226 static unsigned int attribute_hash_list (const_tree, hashval_t);
227 static bool decls_same_for_odr (tree decl1, tree decl2);
228
229 tree global_trees[TI_MAX];
230 tree integer_types[itk_none];
231
232 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
233
234 /* Number of operands for each OpenMP clause. */
235 unsigned const char omp_clause_num_ops[] =
236 {
237 0, /* OMP_CLAUSE_ERROR */
238 1, /* OMP_CLAUSE_PRIVATE */
239 1, /* OMP_CLAUSE_SHARED */
240 1, /* OMP_CLAUSE_FIRSTPRIVATE */
241 2, /* OMP_CLAUSE_LASTPRIVATE */
242 4, /* OMP_CLAUSE_REDUCTION */
243 1, /* OMP_CLAUSE_COPYIN */
244 1, /* OMP_CLAUSE_COPYPRIVATE */
245 2, /* OMP_CLAUSE_LINEAR */
246 2, /* OMP_CLAUSE_ALIGNED */
247 1, /* OMP_CLAUSE_DEPEND */
248 1, /* OMP_CLAUSE_UNIFORM */
249 2, /* OMP_CLAUSE_FROM */
250 2, /* OMP_CLAUSE_TO */
251 2, /* OMP_CLAUSE_MAP */
252 1, /* OMP_CLAUSE__LOOPTEMP_ */
253 1, /* OMP_CLAUSE_IF */
254 1, /* OMP_CLAUSE_NUM_THREADS */
255 1, /* OMP_CLAUSE_SCHEDULE */
256 0, /* OMP_CLAUSE_NOWAIT */
257 0, /* OMP_CLAUSE_ORDERED */
258 0, /* OMP_CLAUSE_DEFAULT */
259 3, /* OMP_CLAUSE_COLLAPSE */
260 0, /* OMP_CLAUSE_UNTIED */
261 1, /* OMP_CLAUSE_FINAL */
262 0, /* OMP_CLAUSE_MERGEABLE */
263 1, /* OMP_CLAUSE_DEVICE */
264 1, /* OMP_CLAUSE_DIST_SCHEDULE */
265 0, /* OMP_CLAUSE_INBRANCH */
266 0, /* OMP_CLAUSE_NOTINBRANCH */
267 1, /* OMP_CLAUSE_NUM_TEAMS */
268 1, /* OMP_CLAUSE_THREAD_LIMIT */
269 0, /* OMP_CLAUSE_PROC_BIND */
270 1, /* OMP_CLAUSE_SAFELEN */
271 1, /* OMP_CLAUSE_SIMDLEN */
272 0, /* OMP_CLAUSE_FOR */
273 0, /* OMP_CLAUSE_PARALLEL */
274 0, /* OMP_CLAUSE_SECTIONS */
275 0, /* OMP_CLAUSE_TASKGROUP */
276 1, /* OMP_CLAUSE__SIMDUID_ */
277 };
278
279 const char * const omp_clause_code_name[] =
280 {
281 "error_clause",
282 "private",
283 "shared",
284 "firstprivate",
285 "lastprivate",
286 "reduction",
287 "copyin",
288 "copyprivate",
289 "linear",
290 "aligned",
291 "depend",
292 "uniform",
293 "from",
294 "to",
295 "map",
296 "_looptemp_",
297 "if",
298 "num_threads",
299 "schedule",
300 "nowait",
301 "ordered",
302 "default",
303 "collapse",
304 "untied",
305 "final",
306 "mergeable",
307 "device",
308 "dist_schedule",
309 "inbranch",
310 "notinbranch",
311 "num_teams",
312 "thread_limit",
313 "proc_bind",
314 "safelen",
315 "simdlen",
316 "for",
317 "parallel",
318 "sections",
319 "taskgroup",
320 "_simduid_"
321 };
322
323
324 /* Return the tree node structure used by tree code CODE. */
325
326 static inline enum tree_node_structure_enum
327 tree_node_structure_for_code (enum tree_code code)
328 {
329 switch (TREE_CODE_CLASS (code))
330 {
331 case tcc_declaration:
332 {
333 switch (code)
334 {
335 case FIELD_DECL:
336 return TS_FIELD_DECL;
337 case PARM_DECL:
338 return TS_PARM_DECL;
339 case VAR_DECL:
340 return TS_VAR_DECL;
341 case LABEL_DECL:
342 return TS_LABEL_DECL;
343 case RESULT_DECL:
344 return TS_RESULT_DECL;
345 case DEBUG_EXPR_DECL:
346 return TS_DECL_WRTL;
347 case CONST_DECL:
348 return TS_CONST_DECL;
349 case TYPE_DECL:
350 return TS_TYPE_DECL;
351 case FUNCTION_DECL:
352 return TS_FUNCTION_DECL;
353 case TRANSLATION_UNIT_DECL:
354 return TS_TRANSLATION_UNIT_DECL;
355 default:
356 return TS_DECL_NON_COMMON;
357 }
358 }
359 case tcc_type:
360 return TS_TYPE_NON_COMMON;
361 case tcc_reference:
362 case tcc_comparison:
363 case tcc_unary:
364 case tcc_binary:
365 case tcc_expression:
366 case tcc_statement:
367 case tcc_vl_exp:
368 return TS_EXP;
369 default: /* tcc_constant and tcc_exceptional */
370 break;
371 }
372 switch (code)
373 {
374 /* tcc_constant cases. */
375 case INTEGER_CST: return TS_INT_CST;
376 case REAL_CST: return TS_REAL_CST;
377 case FIXED_CST: return TS_FIXED_CST;
378 case COMPLEX_CST: return TS_COMPLEX;
379 case VECTOR_CST: return TS_VECTOR;
380 case STRING_CST: return TS_STRING;
381 /* tcc_exceptional cases. */
382 case ERROR_MARK: return TS_COMMON;
383 case IDENTIFIER_NODE: return TS_IDENTIFIER;
384 case TREE_LIST: return TS_LIST;
385 case TREE_VEC: return TS_VEC;
386 case SSA_NAME: return TS_SSA_NAME;
387 case PLACEHOLDER_EXPR: return TS_COMMON;
388 case STATEMENT_LIST: return TS_STATEMENT_LIST;
389 case BLOCK: return TS_BLOCK;
390 case CONSTRUCTOR: return TS_CONSTRUCTOR;
391 case TREE_BINFO: return TS_BINFO;
392 case OMP_CLAUSE: return TS_OMP_CLAUSE;
393 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
394 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
395
396 default:
397 gcc_unreachable ();
398 }
399 }
400
401
402 /* Initialize tree_contains_struct to describe the hierarchy of tree
403 nodes. */
404
405 static void
406 initialize_tree_contains_struct (void)
407 {
408 unsigned i;
409
410 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
411 {
412 enum tree_code code;
413 enum tree_node_structure_enum ts_code;
414
415 code = (enum tree_code) i;
416 ts_code = tree_node_structure_for_code (code);
417
418 /* Mark the TS structure itself. */
419 tree_contains_struct[code][ts_code] = 1;
420
421 /* Mark all the structures that TS is derived from. */
422 switch (ts_code)
423 {
424 case TS_TYPED:
425 case TS_BLOCK:
426 MARK_TS_BASE (code);
427 break;
428
429 case TS_COMMON:
430 case TS_INT_CST:
431 case TS_REAL_CST:
432 case TS_FIXED_CST:
433 case TS_VECTOR:
434 case TS_STRING:
435 case TS_COMPLEX:
436 case TS_SSA_NAME:
437 case TS_CONSTRUCTOR:
438 case TS_EXP:
439 case TS_STATEMENT_LIST:
440 MARK_TS_TYPED (code);
441 break;
442
443 case TS_IDENTIFIER:
444 case TS_DECL_MINIMAL:
445 case TS_TYPE_COMMON:
446 case TS_LIST:
447 case TS_VEC:
448 case TS_BINFO:
449 case TS_OMP_CLAUSE:
450 case TS_OPTIMIZATION:
451 case TS_TARGET_OPTION:
452 MARK_TS_COMMON (code);
453 break;
454
455 case TS_TYPE_WITH_LANG_SPECIFIC:
456 MARK_TS_TYPE_COMMON (code);
457 break;
458
459 case TS_TYPE_NON_COMMON:
460 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
461 break;
462
463 case TS_DECL_COMMON:
464 MARK_TS_DECL_MINIMAL (code);
465 break;
466
467 case TS_DECL_WRTL:
468 case TS_CONST_DECL:
469 MARK_TS_DECL_COMMON (code);
470 break;
471
472 case TS_DECL_NON_COMMON:
473 MARK_TS_DECL_WITH_VIS (code);
474 break;
475
476 case TS_DECL_WITH_VIS:
477 case TS_PARM_DECL:
478 case TS_LABEL_DECL:
479 case TS_RESULT_DECL:
480 MARK_TS_DECL_WRTL (code);
481 break;
482
483 case TS_FIELD_DECL:
484 MARK_TS_DECL_COMMON (code);
485 break;
486
487 case TS_VAR_DECL:
488 MARK_TS_DECL_WITH_VIS (code);
489 break;
490
491 case TS_TYPE_DECL:
492 case TS_FUNCTION_DECL:
493 MARK_TS_DECL_NON_COMMON (code);
494 break;
495
496 case TS_TRANSLATION_UNIT_DECL:
497 MARK_TS_DECL_COMMON (code);
498 break;
499
500 default:
501 gcc_unreachable ();
502 }
503 }
504
505 /* Basic consistency checks for attributes used in fold. */
506 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
507 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
508 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
509 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
510 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
511 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
512 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
513 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
514 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
515 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
516 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
518 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
519 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
520 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
521 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
522 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
523 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
524 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
525 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
526 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
527 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
528 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
529 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
530 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
532 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
533 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
534 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
535 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
536 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
537 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
538 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
539 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
540 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
541 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
542 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
543 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
544 }
545
546
547 /* Init tree.c. */
548
549 void
550 init_ttree (void)
551 {
552 /* Initialize the hash table of types. */
553 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
554 type_hash_eq, 0);
555
556 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
557 tree_decl_map_eq, 0);
558
559 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
560 tree_decl_map_eq, 0);
561 init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
562 tree_priority_map_eq, 0);
563
564 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
565 int_cst_hash_eq, NULL);
566
567 int_cst_node = make_int_cst (1, 1);
568
569 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
570 cl_option_hash_eq, NULL);
571
572 cl_optimization_node = make_node (OPTIMIZATION_NODE);
573 cl_target_option_node = make_node (TARGET_OPTION_NODE);
574
575 /* Initialize the tree_contains_struct array. */
576 initialize_tree_contains_struct ();
577 lang_hooks.init_ts ();
578 }
579
580 \f
581 /* The name of the object as the assembler will see it (but before any
582 translations made by ASM_OUTPUT_LABELREF). Often this is the same
583 as DECL_NAME. It is an IDENTIFIER_NODE. */
584 tree
585 decl_assembler_name (tree decl)
586 {
587 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
588 lang_hooks.set_decl_assembler_name (decl);
589 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
590 }
591
592 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
593
594 bool
595 decl_assembler_name_equal (tree decl, const_tree asmname)
596 {
597 tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
598 const char *decl_str;
599 const char *asmname_str;
600 bool test = false;
601
602 if (decl_asmname == asmname)
603 return true;
604
605 decl_str = IDENTIFIER_POINTER (decl_asmname);
606 asmname_str = IDENTIFIER_POINTER (asmname);
607
608
609 /* If the target assembler name was set by the user, things are trickier.
610 We have a leading '*' to begin with. After that, it's arguable what
611 is the correct thing to do with -fleading-underscore. Arguably, we've
612 historically been doing the wrong thing in assemble_alias by always
613 printing the leading underscore. Since we're not changing that, make
614 sure user_label_prefix follows the '*' before matching. */
615 if (decl_str[0] == '*')
616 {
617 size_t ulp_len = strlen (user_label_prefix);
618
619 decl_str ++;
620
621 if (ulp_len == 0)
622 test = true;
623 else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
624 decl_str += ulp_len, test=true;
625 else
626 decl_str --;
627 }
628 if (asmname_str[0] == '*')
629 {
630 size_t ulp_len = strlen (user_label_prefix);
631
632 asmname_str ++;
633
634 if (ulp_len == 0)
635 test = true;
636 else if (strncmp (asmname_str, user_label_prefix, ulp_len) == 0)
637 asmname_str += ulp_len, test=true;
638 else
639 asmname_str --;
640 }
641
642 if (!test)
643 return false;
644 return strcmp (decl_str, asmname_str) == 0;
645 }
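/* Illustrative sketch of the '*' handling above, assuming a target where
   user_label_prefix is "_" and 'decl' is some decl whose assembler name
   was set explicitly by the user:

     SET_DECL_ASSEMBLER_NAME (decl, get_identifier ("*_foo"));
     decl_assembler_name_equal (decl, get_identifier ("foo"));   => true

   The leading '*' is skipped on the decl side, the "_" prefix after it is
   stripped, and the final strcmp compares "foo" against "foo".  */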
646
647 /* Hash asmnames ignoring the user specified marks. */
648
649 hashval_t
650 decl_assembler_name_hash (const_tree asmname)
651 {
652 if (IDENTIFIER_POINTER (asmname)[0] == '*')
653 {
654 const char *decl_str = IDENTIFIER_POINTER (asmname) + 1;
655 size_t ulp_len = strlen (user_label_prefix);
656
657 if (ulp_len == 0)
658 ;
659 else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
660 decl_str += ulp_len;
661
662 return htab_hash_string (decl_str);
663 }
664
665 return htab_hash_string (IDENTIFIER_POINTER (asmname));
666 }
667
668 /* Compute the number of bytes occupied by a tree with code CODE.
669 This function cannot be used for nodes that have variable sizes,
670 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
671 size_t
672 tree_code_size (enum tree_code code)
673 {
674 switch (TREE_CODE_CLASS (code))
675 {
676 case tcc_declaration: /* A decl node */
677 {
678 switch (code)
679 {
680 case FIELD_DECL:
681 return sizeof (struct tree_field_decl);
682 case PARM_DECL:
683 return sizeof (struct tree_parm_decl);
684 case VAR_DECL:
685 return sizeof (struct tree_var_decl);
686 case LABEL_DECL:
687 return sizeof (struct tree_label_decl);
688 case RESULT_DECL:
689 return sizeof (struct tree_result_decl);
690 case CONST_DECL:
691 return sizeof (struct tree_const_decl);
692 case TYPE_DECL:
693 return sizeof (struct tree_type_decl);
694 case FUNCTION_DECL:
695 return sizeof (struct tree_function_decl);
696 case DEBUG_EXPR_DECL:
697 return sizeof (struct tree_decl_with_rtl);
698 default:
699 return sizeof (struct tree_decl_non_common);
700 }
701 }
702
703 case tcc_type: /* a type node */
704 return sizeof (struct tree_type_non_common);
705
706 case tcc_reference: /* a reference */
707 case tcc_expression: /* an expression */
708 case tcc_statement: /* an expression with side effects */
709 case tcc_comparison: /* a comparison expression */
710 case tcc_unary: /* a unary arithmetic expression */
711 case tcc_binary: /* a binary arithmetic expression */
712 return (sizeof (struct tree_exp)
713 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
714
715 case tcc_constant: /* a constant */
716 switch (code)
717 {
718 case INTEGER_CST: gcc_unreachable ();
719 case REAL_CST: return sizeof (struct tree_real_cst);
720 case FIXED_CST: return sizeof (struct tree_fixed_cst);
721 case COMPLEX_CST: return sizeof (struct tree_complex);
722 case VECTOR_CST: return sizeof (struct tree_vector);
723 case STRING_CST: gcc_unreachable ();
724 default:
725 return lang_hooks.tree_size (code);
726 }
727
728 case tcc_exceptional: /* something random, like an identifier. */
729 switch (code)
730 {
731 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
732 case TREE_LIST: return sizeof (struct tree_list);
733
734 case ERROR_MARK:
735 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
736
737 case TREE_VEC:
738 case OMP_CLAUSE: gcc_unreachable ();
739
740 case SSA_NAME: return sizeof (struct tree_ssa_name);
741
742 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
743 case BLOCK: return sizeof (struct tree_block);
744 case CONSTRUCTOR: return sizeof (struct tree_constructor);
745 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
746 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
747
748 default:
749 return lang_hooks.tree_size (code);
750 }
751
752 default:
753 gcc_unreachable ();
754 }
755 }
756
757 /* Compute the number of bytes occupied by NODE. This routine only
758 looks at TREE_CODE, except for those nodes that have variable sizes. */
759 size_t
760 tree_size (const_tree node)
761 {
762 const enum tree_code code = TREE_CODE (node);
763 switch (code)
764 {
765 case INTEGER_CST:
766 return (sizeof (struct tree_int_cst)
767 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
768
769 case TREE_BINFO:
770 return (offsetof (struct tree_binfo, base_binfos)
771 + vec<tree, va_gc>
772 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
773
774 case TREE_VEC:
775 return (sizeof (struct tree_vec)
776 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
777
778 case VECTOR_CST:
779 return (sizeof (struct tree_vector)
780 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
781
782 case STRING_CST:
783 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
784
785 case OMP_CLAUSE:
786 return (sizeof (struct tree_omp_clause)
787 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
788 * sizeof (tree));
789
790 default:
791 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
792 return (sizeof (struct tree_exp)
793 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
794 else
795 return tree_code_size (code);
796 }
797 }
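/* Worked example of the variable-size cases above (a sketch; the exact
   numbers depend on the host): a TREE_VEC of length 4 occupies

     sizeof (struct tree_vec) + (4 - 1) * sizeof (tree)

   bytes, since the fixed header already holds room for one element and each
   further element adds one pointer.  Likewise an OMP_CLAUSE node for
   OMP_CLAUSE_REDUCTION, which has 4 operands per omp_clause_num_ops, takes
   sizeof (struct tree_omp_clause) + 3 * sizeof (tree).  */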
798
799 /* Record interesting allocation statistics for a tree node with CODE
800 and LENGTH. */
801
802 static void
803 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
804 size_t length ATTRIBUTE_UNUSED)
805 {
806 enum tree_code_class type = TREE_CODE_CLASS (code);
807 tree_node_kind kind;
808
809 if (!GATHER_STATISTICS)
810 return;
811
812 switch (type)
813 {
814 case tcc_declaration: /* A decl node */
815 kind = d_kind;
816 break;
817
818 case tcc_type: /* a type node */
819 kind = t_kind;
820 break;
821
822 case tcc_statement: /* an expression with side effects */
823 kind = s_kind;
824 break;
825
826 case tcc_reference: /* a reference */
827 kind = r_kind;
828 break;
829
830 case tcc_expression: /* an expression */
831 case tcc_comparison: /* a comparison expression */
832 case tcc_unary: /* a unary arithmetic expression */
833 case tcc_binary: /* a binary arithmetic expression */
834 kind = e_kind;
835 break;
836
837 case tcc_constant: /* a constant */
838 kind = c_kind;
839 break;
840
841 case tcc_exceptional: /* something random, like an identifier. */
842 switch (code)
843 {
844 case IDENTIFIER_NODE:
845 kind = id_kind;
846 break;
847
848 case TREE_VEC:
849 kind = vec_kind;
850 break;
851
852 case TREE_BINFO:
853 kind = binfo_kind;
854 break;
855
856 case SSA_NAME:
857 kind = ssa_name_kind;
858 break;
859
860 case BLOCK:
861 kind = b_kind;
862 break;
863
864 case CONSTRUCTOR:
865 kind = constr_kind;
866 break;
867
868 case OMP_CLAUSE:
869 kind = omp_clause_kind;
870 break;
871
872 default:
873 kind = x_kind;
874 break;
875 }
876 break;
877
878 case tcc_vl_exp:
879 kind = e_kind;
880 break;
881
882 default:
883 gcc_unreachable ();
884 }
885
886 tree_code_counts[(int) code]++;
887 tree_node_counts[(int) kind]++;
888 tree_node_sizes[(int) kind] += length;
889 }
890
891 /* Allocate and return a new UID from the DECL_UID namespace. */
892
893 int
894 allocate_decl_uid (void)
895 {
896 return next_decl_uid++;
897 }
898
899 /* Return a newly allocated node of code CODE. For decl and type
900 nodes, some other fields are initialized. The rest of the node is
901 initialized to zero. This function cannot be used for TREE_VEC,
902 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
903 tree_code_size.
904
905 Achoo! I got a code in the node. */
906
907 tree
908 make_node_stat (enum tree_code code MEM_STAT_DECL)
909 {
910 tree t;
911 enum tree_code_class type = TREE_CODE_CLASS (code);
912 size_t length = tree_code_size (code);
913
914 record_node_allocation_statistics (code, length);
915
916 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
917 TREE_SET_CODE (t, code);
918
919 switch (type)
920 {
921 case tcc_statement:
922 TREE_SIDE_EFFECTS (t) = 1;
923 break;
924
925 case tcc_declaration:
926 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
927 {
928 if (code == FUNCTION_DECL)
929 {
930 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
931 DECL_MODE (t) = FUNCTION_MODE;
932 }
933 else
934 DECL_ALIGN (t) = 1;
935 }
936 DECL_SOURCE_LOCATION (t) = input_location;
937 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
938 DECL_UID (t) = --next_debug_decl_uid;
939 else
940 {
941 DECL_UID (t) = allocate_decl_uid ();
942 SET_DECL_PT_UID (t, -1);
943 }
944 if (TREE_CODE (t) == LABEL_DECL)
945 LABEL_DECL_UID (t) = -1;
946
947 break;
948
949 case tcc_type:
950 TYPE_UID (t) = next_type_uid++;
951 TYPE_ALIGN (t) = BITS_PER_UNIT;
952 TYPE_USER_ALIGN (t) = 0;
953 TYPE_MAIN_VARIANT (t) = t;
954 TYPE_CANONICAL (t) = t;
955
956 /* Default to no attributes for type, but let target change that. */
957 TYPE_ATTRIBUTES (t) = NULL_TREE;
958 targetm.set_default_type_attributes (t);
959
960 /* We have not yet computed the alias set for this type. */
961 TYPE_ALIAS_SET (t) = -1;
962 break;
963
964 case tcc_constant:
965 TREE_CONSTANT (t) = 1;
966 break;
967
968 case tcc_expression:
969 switch (code)
970 {
971 case INIT_EXPR:
972 case MODIFY_EXPR:
973 case VA_ARG_EXPR:
974 case PREDECREMENT_EXPR:
975 case PREINCREMENT_EXPR:
976 case POSTDECREMENT_EXPR:
977 case POSTINCREMENT_EXPR:
978 /* All of these have side-effects, no matter what their
979 operands are. */
980 TREE_SIDE_EFFECTS (t) = 1;
981 break;
982
983 default:
984 break;
985 }
986 break;
987
988 default:
989 /* Other classes need no special treatment. */
990 break;
991 }
992
993 return t;
994 }
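/* Minimal usage sketch, mirroring calls made elsewhere in this file: the
   make_node wrapper (defined in tree.h) forwards here, and the returned
   node only has the class-specific defaults above filled in, so callers
   set the remaining fields themselves:

     tree t = make_node (CASE_LABEL_EXPR);
     TREE_TYPE (t) = void_type_node;

   (see build_case_label further down for the complete version).  */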
995 \f
996 /* Return a new node with the same contents as NODE except that its
997 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
998
999 tree
1000 copy_node_stat (tree node MEM_STAT_DECL)
1001 {
1002 tree t;
1003 enum tree_code code = TREE_CODE (node);
1004 size_t length;
1005
1006 gcc_assert (code != STATEMENT_LIST);
1007
1008 length = tree_size (node);
1009 record_node_allocation_statistics (code, length);
1010 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1011 memcpy (t, node, length);
1012
1013 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1014 TREE_CHAIN (t) = 0;
1015 TREE_ASM_WRITTEN (t) = 0;
1016 TREE_VISITED (t) = 0;
1017
1018 if (TREE_CODE_CLASS (code) == tcc_declaration)
1019 {
1020 if (code == DEBUG_EXPR_DECL)
1021 DECL_UID (t) = --next_debug_decl_uid;
1022 else
1023 {
1024 DECL_UID (t) = allocate_decl_uid ();
1025 if (DECL_PT_UID_SET_P (node))
1026 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1027 }
1028 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1029 && DECL_HAS_VALUE_EXPR_P (node))
1030 {
1031 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1032 DECL_HAS_VALUE_EXPR_P (t) = 1;
1033 }
1034 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1035 if (TREE_CODE (node) == VAR_DECL)
1036 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1037 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1038 {
1039 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1040 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1041 }
1042 if (TREE_CODE (node) == FUNCTION_DECL)
1043 DECL_STRUCT_FUNCTION (t) = NULL;
1044 }
1045 else if (TREE_CODE_CLASS (code) == tcc_type)
1046 {
1047 TYPE_UID (t) = next_type_uid++;
1048 /* The following is so that the debug code for
1049 the copy is different from the original type.
1050 The two statements usually duplicate each other
1051 (because they clear fields of the same union),
1052 but the optimizer should catch that. */
1053 TYPE_SYMTAB_POINTER (t) = 0;
1054 TYPE_SYMTAB_ADDRESS (t) = 0;
1055
1056 /* Do not copy the values cache. */
1057 if (TYPE_CACHED_VALUES_P (t))
1058 {
1059 TYPE_CACHED_VALUES_P (t) = 0;
1060 TYPE_CACHED_VALUES (t) = NULL_TREE;
1061 }
1062 }
1063
1064 return t;
1065 }
1066
1067 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1068 For example, this can copy a list made of TREE_LIST nodes. */
1069
1070 tree
1071 copy_list (tree list)
1072 {
1073 tree head;
1074 tree prev, next;
1075
1076 if (list == 0)
1077 return 0;
1078
1079 head = prev = copy_node (list);
1080 next = TREE_CHAIN (list);
1081 while (next)
1082 {
1083 TREE_CHAIN (prev) = copy_node (next);
1084 prev = TREE_CHAIN (prev);
1085 next = TREE_CHAIN (next);
1086 }
1087 return head;
1088 }
1089
1090 \f
1091 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1092 INTEGER_CST with value CST and type TYPE. */
1093
1094 static unsigned int
1095 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1096 {
1097 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1098 /* We need an extra zero HWI if CST is an unsigned integer with its
1099 upper bit set, and if CST occupies a whole number of HWIs. */
1100 if (TYPE_UNSIGNED (type)
1101 && wi::neg_p (cst)
1102 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1103 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1104 return cst.get_len ();
1105 }
1106
1107 /* Return a new INTEGER_CST with value CST and type TYPE. */
1108
1109 static tree
1110 build_new_int_cst (tree type, const wide_int &cst)
1111 {
1112 unsigned int len = cst.get_len ();
1113 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1114 tree nt = make_int_cst (len, ext_len);
1115
1116 if (len < ext_len)
1117 {
1118 --ext_len;
1119 TREE_INT_CST_ELT (nt, ext_len) = 0;
1120 for (unsigned int i = len; i < ext_len; ++i)
1121 TREE_INT_CST_ELT (nt, i) = -1;
1122 }
1123 else if (TYPE_UNSIGNED (type)
1124 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1125 {
1126 len--;
1127 TREE_INT_CST_ELT (nt, len)
1128 = zext_hwi (cst.elt (len),
1129 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1130 }
1131
1132 for (unsigned int i = 0; i < len; i++)
1133 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1134 TREE_TYPE (nt) = type;
1135 return nt;
1136 }
1137
1138 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1139
1140 tree
1141 build_int_cst (tree type, HOST_WIDE_INT low)
1142 {
1143 /* Support legacy code. */
1144 if (!type)
1145 type = integer_type_node;
1146
1147 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1148 }
1149
1150 tree
1151 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1152 {
1153 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1154 }
1155
1156 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1157
1158 tree
1159 build_int_cst_type (tree type, HOST_WIDE_INT low)
1160 {
1161 gcc_assert (type);
1162 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1163 }
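/* Usage sketch for the constructors above: all three routines funnel into
   wide_int_to_tree, so equal values of the same type come back as the same
   shared node:

     tree two    = build_int_cst (integer_type_node, 2);
     tree twou   = build_int_cstu (size_type_node, 2);
     tree legacy = build_int_cst (NULL_TREE, 2);   (treated as integer_type_node)

   Passing a NULL type is only supported for legacy callers, as noted in
   build_int_cst itself.  */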
1164
1165 /* Constructs a tree of type TYPE with the value given by CST. The signedness
1166 of CST is assumed to be the same as the signedness of TYPE. */
1167
1168 tree
1169 double_int_to_tree (tree type, double_int cst)
1170 {
1171 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1172 }
1173
1174 /* We force the wide_int CST to the range of the type TYPE by sign or
1175 zero extending it. OVERFLOWABLE indicates if we are interested in
1176 overflow of the value: when >0 we are only interested in signed
1177 overflow, when <0 we are interested in any overflow. OVERFLOWED
1178 indicates whether overflow has already occurred.
1179 We force the value to be within range of the type (by setting to
1180 0 or 1 all the bits outside the type's range).
1181 We set TREE_OVERFLOW if
1182 OVERFLOWED is nonzero,
1183 or OVERFLOWABLE is >0 and signed overflow occurs,
1184 or OVERFLOWABLE is <0 and any overflow occurs.
1185 We return a new tree node for the extended wide_int. The node
1186 is shared if no overflow flags are set. */
1187
1188
1189 tree
1190 force_fit_type (tree type, const wide_int_ref &cst,
1191 int overflowable, bool overflowed)
1192 {
1193 signop sign = TYPE_SIGN (type);
1194
1195 /* If we need to set overflow flags, return a new unshared node. */
1196 if (overflowed || !wi::fits_to_tree_p (cst, type))
1197 {
1198 if (overflowed
1199 || overflowable < 0
1200 || (overflowable > 0 && sign == SIGNED))
1201 {
1202 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1203 tree t = build_new_int_cst (type, tmp);
1204 TREE_OVERFLOW (t) = 1;
1205 return t;
1206 }
1207 }
1208
1209 /* Else build a shared node. */
1210 return wide_int_to_tree (type, cst);
1211 }
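/* Sketch (assuming 'uchar_type' is an 8-bit unsigned integer type): fitting
   a value that is out of range, such as 300 expressed at a wider precision,

     tree t = force_fit_type (uchar_type, wi::shwi (300, 32), -1, false);

   takes the overflow branch above: the value is wrapped to the 8-bit range
   (44) and the result is a fresh node with TREE_OVERFLOW set, rather than
   the shared constant that wide_int_to_tree would return.  */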
1212
1213 /* These are the hash table functions for the hash table of INTEGER_CST
1214 nodes of a sizetype. */
1215
1216 /* Return the hash code of X, an INTEGER_CST. */
1217
1218 static hashval_t
1219 int_cst_hash_hash (const void *x)
1220 {
1221 const_tree const t = (const_tree) x;
1222 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1223 int i;
1224
1225 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1226 code ^= TREE_INT_CST_ELT (t, i);
1227
1228 return code;
1229 }
1230
1231 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1232 is the same as that given by *Y (also an INTEGER_CST tree node). */
1233
1234 static int
1235 int_cst_hash_eq (const void *x, const void *y)
1236 {
1237 const_tree const xt = (const_tree) x;
1238 const_tree const yt = (const_tree) y;
1239
1240 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1241 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1242 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1243 return false;
1244
1245 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1246 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1247 return false;
1248
1249 return true;
1250 }
1251
1252 /* Create an INT_CST node of TYPE and value CST.
1253 The returned node is always shared. For small integers we use a
1254 per-type vector cache, for larger ones we use a single hash table.
1255 The value is extended from its precision according to the sign of
1256 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1257 the upper bits and ensures that hashing and value equality based
1258 upon the underlying HOST_WIDE_INTs works without masking. */
1259
1260 tree
1261 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1262 {
1263 tree t;
1264 int ix = -1;
1265 int limit = 0;
1266
1267 gcc_assert (type);
1268 unsigned int prec = TYPE_PRECISION (type);
1269 signop sgn = TYPE_SIGN (type);
1270
1271 /* Verify that everything is canonical. */
1272 int l = pcst.get_len ();
1273 if (l > 1)
1274 {
1275 if (pcst.elt (l - 1) == 0)
1276 gcc_assert (pcst.elt (l - 2) < 0);
1277 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1278 gcc_assert (pcst.elt (l - 2) >= 0);
1279 }
1280
1281 wide_int cst = wide_int::from (pcst, prec, sgn);
1282 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1283
1284 switch (TREE_CODE (type))
1285 {
1286 case NULLPTR_TYPE:
1287 gcc_assert (cst == 0);
1288 /* Fallthru. */
1289
1290 case POINTER_TYPE:
1291 case REFERENCE_TYPE:
1292 case POINTER_BOUNDS_TYPE:
1293 /* Cache NULL pointer and zero bounds. */
1294 if (cst == 0)
1295 {
1296 limit = 1;
1297 ix = 0;
1298 }
1299 break;
1300
1301 case BOOLEAN_TYPE:
1302 /* Cache false or true. */
1303 limit = 2;
1304 if (wi::leu_p (cst, 1))
1305 ix = cst.to_uhwi ();
1306 break;
1307
1308 case INTEGER_TYPE:
1309 case OFFSET_TYPE:
1310 if (TYPE_SIGN (type) == UNSIGNED)
1311 {
1312 /* Cache 0..N */
1313 limit = INTEGER_SHARE_LIMIT;
1314
1315 /* This is a little hokey, but if the prec is smaller than
1316 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1317 obvious test will not get the correct answer. */
1318 if (prec < HOST_BITS_PER_WIDE_INT)
1319 {
1320 if (cst.to_uhwi () < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1321 ix = cst.to_uhwi ();
1322 }
1323 else if (wi::ltu_p (cst, INTEGER_SHARE_LIMIT))
1324 ix = cst.to_uhwi ();
1325 }
1326 else
1327 {
1328 /* Cache -1..N */
1329 limit = INTEGER_SHARE_LIMIT + 1;
1330
1331 if (cst == -1)
1332 ix = 0;
1333 else if (!wi::neg_p (cst))
1334 {
1335 if (prec < HOST_BITS_PER_WIDE_INT)
1336 {
1337 if (cst.to_shwi () < INTEGER_SHARE_LIMIT)
1338 ix = cst.to_shwi () + 1;
1339 }
1340 else if (wi::lts_p (cst, INTEGER_SHARE_LIMIT))
1341 ix = cst.to_shwi () + 1;
1342 }
1343 }
1344 break;
1345
1346 case ENUMERAL_TYPE:
1347 break;
1348
1349 default:
1350 gcc_unreachable ();
1351 }
1352
1353 if (ext_len == 1)
1354 {
1355 /* We just need to store a single HOST_WIDE_INT. */
1356 HOST_WIDE_INT hwi;
1357 if (TYPE_UNSIGNED (type))
1358 hwi = cst.to_uhwi ();
1359 else
1360 hwi = cst.to_shwi ();
1361 if (ix >= 0)
1362 {
1363 /* Look for it in the type's vector of small shared ints. */
1364 if (!TYPE_CACHED_VALUES_P (type))
1365 {
1366 TYPE_CACHED_VALUES_P (type) = 1;
1367 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1368 }
1369
1370 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1371 if (t)
1372 /* Make sure no one is clobbering the shared constant. */
1373 gcc_assert (TREE_TYPE (t) == type
1374 && TREE_INT_CST_NUNITS (t) == 1
1375 && TREE_INT_CST_EXT_NUNITS (t) == 1
1376 && TREE_INT_CST_ELT (t, 0) == hwi);
1377 else
1378 {
1379 /* Create a new shared int. */
1380 t = build_new_int_cst (type, cst);
1381 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1382 }
1383 }
1384 else
1385 {
1386 /* Use the cache of larger shared ints, using int_cst_node as
1387 a temporary. */
1388 void **slot;
1389
1390 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1391 TREE_TYPE (int_cst_node) = type;
1392
1393 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1394 t = (tree) *slot;
1395 if (!t)
1396 {
1397 /* Insert this one into the hash table. */
1398 t = int_cst_node;
1399 *slot = t;
1400 /* Make a new node for next time round. */
1401 int_cst_node = make_int_cst (1, 1);
1402 }
1403 }
1404 }
1405 else
1406 {
1407 /* The value either hashes properly or we drop it on the floor
1408 for the gc to take care of. There will not be enough of them
1409 to worry about. */
1410 void **slot;
1411
1412 tree nt = build_new_int_cst (type, cst);
1413 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1414 t = (tree) *slot;
1415 if (!t)
1416 {
1417 /* Insert this one into the hash table. */
1418 t = nt;
1419 *slot = t;
1420 }
1421 }
1422
1423 return t;
1424 }
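/* Consequence of the caching above (a sketch of the intended behavior):
   small values of a given integer type are shared through
   TYPE_CACHED_VALUES, larger ones through int_cst_hash_table, so repeated
   requests for the same value return pointer-identical nodes:

     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 1);

   a and b are the same node, pulled from the per-type cache; values beyond
   INTEGER_SHARE_LIMIT are likewise shared via the hash table.  */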
1425
1426 void
1427 cache_integer_cst (tree t)
1428 {
1429 tree type = TREE_TYPE (t);
1430 int ix = -1;
1431 int limit = 0;
1432 int prec = TYPE_PRECISION (type);
1433
1434 gcc_assert (!TREE_OVERFLOW (t));
1435
1436 switch (TREE_CODE (type))
1437 {
1438 case NULLPTR_TYPE:
1439 gcc_assert (integer_zerop (t));
1440 /* Fallthru. */
1441
1442 case POINTER_TYPE:
1443 case REFERENCE_TYPE:
1444 /* Cache NULL pointer. */
1445 if (integer_zerop (t))
1446 {
1447 limit = 1;
1448 ix = 0;
1449 }
1450 break;
1451
1452 case BOOLEAN_TYPE:
1453 /* Cache false or true. */
1454 limit = 2;
1455 if (wi::ltu_p (t, 2))
1456 ix = TREE_INT_CST_ELT (t, 0);
1457 break;
1458
1459 case INTEGER_TYPE:
1460 case OFFSET_TYPE:
1461 if (TYPE_UNSIGNED (type))
1462 {
1463 /* Cache 0..N */
1464 limit = INTEGER_SHARE_LIMIT;
1465
1466 /* This is a little hokey, but if the prec is smaller than
1467 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1468 obvious test will not get the correct answer. */
1469 if (prec < HOST_BITS_PER_WIDE_INT)
1470 {
1471 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1472 ix = tree_to_uhwi (t);
1473 }
1474 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1475 ix = tree_to_uhwi (t);
1476 }
1477 else
1478 {
1479 /* Cache -1..N */
1480 limit = INTEGER_SHARE_LIMIT + 1;
1481
1482 if (integer_minus_onep (t))
1483 ix = 0;
1484 else if (!wi::neg_p (t))
1485 {
1486 if (prec < HOST_BITS_PER_WIDE_INT)
1487 {
1488 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1489 ix = tree_to_shwi (t) + 1;
1490 }
1491 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1492 ix = tree_to_shwi (t) + 1;
1493 }
1494 }
1495 break;
1496
1497 case ENUMERAL_TYPE:
1498 break;
1499
1500 default:
1501 gcc_unreachable ();
1502 }
1503
1504 if (ix >= 0)
1505 {
1506 /* Look for it in the type's vector of small shared ints. */
1507 if (!TYPE_CACHED_VALUES_P (type))
1508 {
1509 TYPE_CACHED_VALUES_P (type) = 1;
1510 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1511 }
1512
1513 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1514 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1515 }
1516 else
1517 {
1518 /* Use the cache of larger shared ints. */
1519 void **slot;
1520
1521 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1522 /* If there is already an entry for the number verify it's the
1523 same. */
1524 if (*slot)
1525 gcc_assert (wi::eq_p (tree (*slot), t));
1526 else
1527 /* Otherwise insert this one into the hash table. */
1528 *slot = t;
1529 }
1530 }
1531
1532
1533 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1534 and the rest are zeros. */
1535
1536 tree
1537 build_low_bits_mask (tree type, unsigned bits)
1538 {
1539 gcc_assert (bits <= TYPE_PRECISION (type));
1540
1541 return wide_int_to_tree (type, wi::mask (bits, false,
1542 TYPE_PRECISION (type)));
1543 }
1544
1545 /* Build a newly constructed VECTOR_CST node of length LEN. */
1546
1547 tree
1548 make_vector_stat (unsigned len MEM_STAT_DECL)
1549 {
1550 tree t;
1551 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1552
1553 record_node_allocation_statistics (VECTOR_CST, length);
1554
1555 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1556
1557 TREE_SET_CODE (t, VECTOR_CST);
1558 TREE_CONSTANT (t) = 1;
1559
1560 return t;
1561 }
1562
1563 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1564 are in a list pointed to by VALS. */
1565
1566 tree
1567 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1568 {
1569 int over = 0;
1570 unsigned cnt = 0;
1571 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1572 TREE_TYPE (v) = type;
1573
1574 /* Iterate through elements and check for overflow. */
1575 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1576 {
1577 tree value = vals[cnt];
1578
1579 VECTOR_CST_ELT (v, cnt) = value;
1580
1581 /* Don't crash if we get an address constant. */
1582 if (!CONSTANT_CLASS_P (value))
1583 continue;
1584
1585 over |= TREE_OVERFLOW (value);
1586 }
1587
1588 TREE_OVERFLOW (v) = over;
1589 return v;
1590 }
1591
1592 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1593 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1594
1595 tree
1596 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1597 {
1598 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1599 unsigned HOST_WIDE_INT idx;
1600 tree value;
1601
1602 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1603 vec[idx] = value;
1604 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1605 vec[idx] = build_zero_cst (TREE_TYPE (type));
1606
1607 return build_vector (type, vec);
1608 }
1609
1610 /* Build a vector of type VECTYPE where all the elements are SCs. */
1611 tree
1612 build_vector_from_val (tree vectype, tree sc)
1613 {
1614 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1615
1616 if (sc == error_mark_node)
1617 return sc;
1618
1619 /* Verify that the vector type is suitable for SC. Note that there
1620 is some inconsistency in the type-system with respect to restrict
1621 qualifications of pointers. Vector types always have a main-variant
1622 element type and the qualification is applied to the vector-type.
1623 So TREE_TYPE (vector-type) does not return a properly qualified
1624 vector element-type. */
1625 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1626 TREE_TYPE (vectype)));
1627
1628 if (CONSTANT_CLASS_P (sc))
1629 {
1630 tree *v = XALLOCAVEC (tree, nunits);
1631 for (i = 0; i < nunits; ++i)
1632 v[i] = sc;
1633 return build_vector (vectype, v);
1634 }
1635 else
1636 {
1637 vec<constructor_elt, va_gc> *v;
1638 vec_alloc (v, nunits);
1639 for (i = 0; i < nunits; ++i)
1640 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1641 return build_constructor (vectype, v);
1642 }
1643 }
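/* Sketch, assuming 'v4si_type' is a 4-element vector of 32-bit ints built
   elsewhere (e.g. with build_vector_type): splatting a constant produces a
   VECTOR_CST, while a non-constant element falls back to a CONSTRUCTOR as
   the code above shows:

     tree cst_vec
       = build_vector_from_val (v4si_type,
                                build_int_cst (TREE_TYPE (v4si_type), 7));

   Here TREE_TYPE (v4si_type) is the element type, so the element constant
   satisfies the TYPE_MAIN_VARIANT compatibility check above.  */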
1644
1645 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1646 are in the vec pointed to by VALS. */
1647 tree
1648 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1649 {
1650 tree c = make_node (CONSTRUCTOR);
1651 unsigned int i;
1652 constructor_elt *elt;
1653 bool constant_p = true;
1654 bool side_effects_p = false;
1655
1656 TREE_TYPE (c) = type;
1657 CONSTRUCTOR_ELTS (c) = vals;
1658
1659 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1660 {
1661 /* Mostly ctors will have elts that don't have side-effects, so
1662 the usual case is to scan all the elements. Hence a single
1663 loop for both const and side effects, rather than one loop
1664 each (with early outs). */
1665 if (!TREE_CONSTANT (elt->value))
1666 constant_p = false;
1667 if (TREE_SIDE_EFFECTS (elt->value))
1668 side_effects_p = true;
1669 }
1670
1671 TREE_SIDE_EFFECTS (c) = side_effects_p;
1672 TREE_CONSTANT (c) = constant_p;
1673
1674 return c;
1675 }
1676
1677 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1678 INDEX and VALUE. */
1679 tree
1680 build_constructor_single (tree type, tree index, tree value)
1681 {
1682 vec<constructor_elt, va_gc> *v;
1683 constructor_elt elt = {index, value};
1684
1685 vec_alloc (v, 1);
1686 v->quick_push (elt);
1687
1688 return build_constructor (type, v);
1689 }
1690
1691
1692 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1693 are in a list pointed to by VALS. */
1694 tree
1695 build_constructor_from_list (tree type, tree vals)
1696 {
1697 tree t;
1698 vec<constructor_elt, va_gc> *v = NULL;
1699
1700 if (vals)
1701 {
1702 vec_alloc (v, list_length (vals));
1703 for (t = vals; t; t = TREE_CHAIN (t))
1704 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1705 }
1706
1707 return build_constructor (type, v);
1708 }
1709
1710 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1711 of elements, provided as index/value pairs. */
1712
1713 tree
1714 build_constructor_va (tree type, int nelts, ...)
1715 {
1716 vec<constructor_elt, va_gc> *v = NULL;
1717 va_list p;
1718
1719 va_start (p, nelts);
1720 vec_alloc (v, nelts);
1721 while (nelts--)
1722 {
1723 tree index = va_arg (p, tree);
1724 tree value = va_arg (p, tree);
1725 CONSTRUCTOR_APPEND_ELT (v, index, value);
1726 }
1727 va_end (p);
1728 return build_constructor (type, v);
1729 }
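/* Usage sketch: the variadic arguments come in index/value pairs, so a
   two-element initializer with default (NULL_TREE) indexes looks like

     tree ctor
       = build_constructor_va (some_array_type, 2,
                               NULL_TREE, build_int_cst (integer_type_node, 1),
                               NULL_TREE, build_int_cst (integer_type_node, 2));

   where 'some_array_type' stands for an array of int built elsewhere;
   TREE_CONSTANT on the result is computed by build_constructor from the
   elements.  */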
1730
1731 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1732
1733 tree
1734 build_fixed (tree type, FIXED_VALUE_TYPE f)
1735 {
1736 tree v;
1737 FIXED_VALUE_TYPE *fp;
1738
1739 v = make_node (FIXED_CST);
1740 fp = ggc_alloc_fixed_value ();
1741 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1742
1743 TREE_TYPE (v) = type;
1744 TREE_FIXED_CST_PTR (v) = fp;
1745 return v;
1746 }
1747
1748 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1749
1750 tree
1751 build_real (tree type, REAL_VALUE_TYPE d)
1752 {
1753 tree v;
1754 REAL_VALUE_TYPE *dp;
1755 int overflow = 0;
1756
1757 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1758 Consider doing it via real_convert now. */
1759
1760 v = make_node (REAL_CST);
1761 dp = ggc_alloc_real_value ();
1762 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1763
1764 TREE_TYPE (v) = type;
1765 TREE_REAL_CST_PTR (v) = dp;
1766 TREE_OVERFLOW (v) = overflow;
1767 return v;
1768 }
1769
1770 /* Return a REAL_VALUE_TYPE whose value is the integer value of the
1771 INTEGER_CST node I, converted to the floating-point type TYPE. */
1772
1773 REAL_VALUE_TYPE
1774 real_value_from_int_cst (const_tree type, const_tree i)
1775 {
1776 REAL_VALUE_TYPE d;
1777
1778 /* Clear all bits of the real value type so that we can later do
1779 bitwise comparisons to see if two values are the same. */
1780 memset (&d, 0, sizeof d);
1781
1782 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
1783 wide_int (i),
1784 TYPE_SIGN (TREE_TYPE (i)));
1785 return d;
1786 }
1787
1788 /* Given a tree representing an integer constant I, return a tree
1789 representing the same value as a floating-point constant of type TYPE. */
1790
1791 tree
1792 build_real_from_int_cst (tree type, const_tree i)
1793 {
1794 tree v;
1795 int overflow = TREE_OVERFLOW (i);
1796
1797 v = build_real (type, real_value_from_int_cst (type, i));
1798
1799 TREE_OVERFLOW (v) |= overflow;
1800 return v;
1801 }
1802
1803 /* Return a newly constructed STRING_CST node whose value is
1804 the LEN characters at STR.
1805 Note that for a C string literal, LEN should include the trailing NUL.
1806 The TREE_TYPE is not initialized. */
1807
1808 tree
1809 build_string (int len, const char *str)
1810 {
1811 tree s;
1812 size_t length;
1813
1814 /* Do not waste bytes provided by padding of struct tree_string. */
1815 length = len + offsetof (struct tree_string, str) + 1;
1816
1817 record_node_allocation_statistics (STRING_CST, length);
1818
1819 s = ggc_alloc_tree_node (length);
1820
1821 memset (s, 0, sizeof (struct tree_typed));
1822 TREE_SET_CODE (s, STRING_CST);
1823 TREE_CONSTANT (s) = 1;
1824 TREE_STRING_LENGTH (s) = len;
1825 memcpy (s->string.str, str, len);
1826 s->string.str[len] = '\0';
1827
1828 return s;
1829 }
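/* Usage sketch: for the C literal "hi" the caller passes the length
   including the trailing NUL and then gives the node a type itself, e.g.

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));

   build_array_type, build_index_type and size_int are the usual helpers
   declared in tree.h; the important point is that build_string leaves
   TREE_TYPE unset.  */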
1830
1831 /* Return a newly constructed COMPLEX_CST node whose value is
1832 specified by the real and imaginary parts REAL and IMAG.
1833 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1834 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1835
1836 tree
1837 build_complex (tree type, tree real, tree imag)
1838 {
1839 tree t = make_node (COMPLEX_CST);
1840
1841 TREE_REALPART (t) = real;
1842 TREE_IMAGPART (t) = imag;
1843 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1844 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1845 return t;
1846 }
1847
1848 /* Return a constant of arithmetic type TYPE which is the
1849 multiplicative identity of the set TYPE. */
1850
1851 tree
1852 build_one_cst (tree type)
1853 {
1854 switch (TREE_CODE (type))
1855 {
1856 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1857 case POINTER_TYPE: case REFERENCE_TYPE:
1858 case OFFSET_TYPE:
1859 return build_int_cst (type, 1);
1860
1861 case REAL_TYPE:
1862 return build_real (type, dconst1);
1863
1864 case FIXED_POINT_TYPE:
1865 /* We can only generate 1 for accum types. */
1866 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1867 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1868
1869 case VECTOR_TYPE:
1870 {
1871 tree scalar = build_one_cst (TREE_TYPE (type));
1872
1873 return build_vector_from_val (type, scalar);
1874 }
1875
1876 case COMPLEX_TYPE:
1877 return build_complex (type,
1878 build_one_cst (TREE_TYPE (type)),
1879 build_zero_cst (TREE_TYPE (type)));
1880
1881 default:
1882 gcc_unreachable ();
1883 }
1884 }
1885
1886 /* Return an integer of type TYPE containing all 1's in as much precision as
1887 it contains, or a complex or vector whose subparts are such integers. */
1888
1889 tree
1890 build_all_ones_cst (tree type)
1891 {
1892 if (TREE_CODE (type) == COMPLEX_TYPE)
1893 {
1894 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1895 return build_complex (type, scalar, scalar);
1896 }
1897 else
1898 return build_minus_one_cst (type);
1899 }
1900
1901 /* Return a constant of arithmetic type TYPE which is the
1902 opposite of the multiplicative identity of the set TYPE. */
1903
1904 tree
1905 build_minus_one_cst (tree type)
1906 {
1907 switch (TREE_CODE (type))
1908 {
1909 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1910 case POINTER_TYPE: case REFERENCE_TYPE:
1911 case OFFSET_TYPE:
1912 return build_int_cst (type, -1);
1913
1914 case REAL_TYPE:
1915 return build_real (type, dconstm1);
1916
1917 case FIXED_POINT_TYPE:
1918 /* We can only generate -1 for accum types. */
1919 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1920 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1921 TYPE_MODE (type)));
1922
1923 case VECTOR_TYPE:
1924 {
1925 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1926
1927 return build_vector_from_val (type, scalar);
1928 }
1929
1930 case COMPLEX_TYPE:
1931 return build_complex (type,
1932 build_minus_one_cst (TREE_TYPE (type)),
1933 build_zero_cst (TREE_TYPE (type)));
1934
1935 default:
1936 gcc_unreachable ();
1937 }
1938 }
1939
1940 /* Build 0 constant of type TYPE. This is used by constructor folding
1941 and thus the constant should be represented in memory by
1942 zero(es). */
1943
1944 tree
1945 build_zero_cst (tree type)
1946 {
1947 switch (TREE_CODE (type))
1948 {
1949 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1950 case POINTER_TYPE: case REFERENCE_TYPE:
1951 case OFFSET_TYPE: case NULLPTR_TYPE:
1952 return build_int_cst (type, 0);
1953
1954 case REAL_TYPE:
1955 return build_real (type, dconst0);
1956
1957 case FIXED_POINT_TYPE:
1958 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1959
1960 case VECTOR_TYPE:
1961 {
1962 tree scalar = build_zero_cst (TREE_TYPE (type));
1963
1964 return build_vector_from_val (type, scalar);
1965 }
1966
1967 case COMPLEX_TYPE:
1968 {
1969 tree zero = build_zero_cst (TREE_TYPE (type));
1970
1971 return build_complex (type, zero, zero);
1972 }
1973
1974 default:
1975 if (!AGGREGATE_TYPE_P (type))
1976 return fold_convert (type, integer_zero_node);
1977 return build_constructor (type, NULL);
1978 }
1979 }
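/* Sketch of what the cases above return -- a zero suitable for the
   in-memory representation of the type:

     build_zero_cst (integer_type_node)   an INTEGER_CST of value 0
     build_zero_cst (double_type_node)    a REAL_CST holding dconst0
     build_zero_cst (some_struct_type)    an empty CONSTRUCTOR

   where 'some_struct_type' stands for any aggregate type; the empty
   CONSTRUCTOR is how "all zeros" is spelled for aggregates during
   constructor folding.  */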
1980
1981
1982 /* Build a BINFO with LEN language slots. */
1983
1984 tree
1985 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
1986 {
1987 tree t;
1988 size_t length = (offsetof (struct tree_binfo, base_binfos)
1989 + vec<tree, va_gc>::embedded_size (base_binfos));
1990
1991 record_node_allocation_statistics (TREE_BINFO, length);
1992
1993 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1994
1995 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
1996
1997 TREE_SET_CODE (t, TREE_BINFO);
1998
1999 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2000
2001 return t;
2002 }
2003
2004 /* Create a CASE_LABEL_EXPR tree node and return it. */
2005
2006 tree
2007 build_case_label (tree low_value, tree high_value, tree label_decl)
2008 {
2009 tree t = make_node (CASE_LABEL_EXPR);
2010
2011 TREE_TYPE (t) = void_type_node;
2012 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2013
2014 CASE_LOW (t) = low_value;
2015 CASE_HIGH (t) = high_value;
2016 CASE_LABEL (t) = label_decl;
2017 CASE_CHAIN (t) = NULL_TREE;
2018
2019 return t;
2020 }
2021
2022 /* Build a newly constructed INTEGER_CST node of length LEN. */
2023
2024 tree
2025 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2026 {
2027 tree t;
2028 int length = (ext_len - 1) * sizeof (tree) + sizeof (struct tree_int_cst);
2029
2030 gcc_assert (len);
2031 record_node_allocation_statistics (INTEGER_CST, length);
2032
2033 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2034
2035 TREE_SET_CODE (t, INTEGER_CST);
2036 TREE_INT_CST_NUNITS (t) = len;
2037 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2038
2039 TREE_CONSTANT (t) = 1;
2040
2041 return t;
2042 }
2043
2044 /* Build a newly constructed TREE_VEC node of length LEN. */
2045
2046 tree
2047 make_tree_vec_stat (int len MEM_STAT_DECL)
2048 {
2049 tree t;
2050 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2051
2052 record_node_allocation_statistics (TREE_VEC, length);
2053
2054 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2055
2056 TREE_SET_CODE (t, TREE_VEC);
2057 TREE_VEC_LENGTH (t) = len;
2058
2059 return t;
2060 }
2061 \f
2062 /* Return 1 if EXPR is the integer constant zero or a complex or
2063 vector constant of zero. */
2064
2065 int
2066 integer_zerop (const_tree expr)
2067 {
2068 STRIP_NOPS (expr);
2069
2070 switch (TREE_CODE (expr))
2071 {
2072 case INTEGER_CST:
2073 return wi::eq_p (expr, 0);
2074 case COMPLEX_CST:
2075 return (integer_zerop (TREE_REALPART (expr))
2076 && integer_zerop (TREE_IMAGPART (expr)));
2077 case VECTOR_CST:
2078 {
2079 unsigned i;
2080 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2081 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2082 return false;
2083 return true;
2084 }
2085 default:
2086 return false;
2087 }
2088 }
2089
2090 /* Return 1 if EXPR is the integer constant one or the corresponding
2091 complex or vector constant. */
2092
2093 int
2094 integer_onep (const_tree expr)
2095 {
2096 STRIP_NOPS (expr);
2097
2098 switch (TREE_CODE (expr))
2099 {
2100 case INTEGER_CST:
2101 return wi::eq_p (expr, 1);
2102 case COMPLEX_CST:
2103 return (integer_onep (TREE_REALPART (expr))
2104 && integer_zerop (TREE_IMAGPART (expr)));
2105 case VECTOR_CST:
2106 {
2107 unsigned i;
2108 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2109 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2110 return false;
2111 return true;
2112 }
2113 default:
2114 return false;
2115 }
2116 }
2117
2118 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2119 it contains, or a complex or vector whose subparts are such integers. */
2120
2121 int
2122 integer_all_onesp (const_tree expr)
2123 {
2124 STRIP_NOPS (expr);
2125
2126 if (TREE_CODE (expr) == COMPLEX_CST
2127 && integer_all_onesp (TREE_REALPART (expr))
2128 && integer_all_onesp (TREE_IMAGPART (expr)))
2129 return 1;
2130
2131 else if (TREE_CODE (expr) == VECTOR_CST)
2132 {
2133 unsigned i;
2134 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2135 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2136 return 0;
2137 return 1;
2138 }
2139
2140 else if (TREE_CODE (expr) != INTEGER_CST)
2141 return 0;
2142
2143 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2144 }
2145
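
/* Editor's note: a standalone illustration (not GCC code) of the final test in
   integer_all_onesp above: an N-bit value is "all ones" exactly when it equals
   the maximum unsigned value representable in N bits, which is what the
   comparison against wi::max_value expresses.  The value is assumed to already
   be truncated to PREC bits.  */

#include <assert.h>
#include <stdint.h>

static int
all_ones_p (uint64_t x, unsigned prec)
{
  /* Maximum unsigned value of PREC bits; PREC is assumed to be 1..64.  */
  uint64_t max = (prec == 64) ? UINT64_MAX : ((UINT64_C (1) << prec) - 1);
  return x == max;
}

int
main (void)
{
  assert (all_ones_p (0xff, 8));
  assert (!all_ones_p (0x7f, 8));
  assert (all_ones_p (UINT64_MAX, 64));
  return 0;
}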
2146 /* Return 1 if EXPR is the integer constant minus one. */
2147
2148 int
2149 integer_minus_onep (const_tree expr)
2150 {
2151 STRIP_NOPS (expr);
2152
2153 if (TREE_CODE (expr) == COMPLEX_CST)
2154 return (integer_all_onesp (TREE_REALPART (expr))
2155 && integer_zerop (TREE_IMAGPART (expr)));
2156 else
2157 return integer_all_onesp (expr);
2158 }
2159
2160 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2161 one bit on). */
2162
2163 int
2164 integer_pow2p (const_tree expr)
2165 {
2166 STRIP_NOPS (expr);
2167
2168 if (TREE_CODE (expr) == COMPLEX_CST
2169 && integer_pow2p (TREE_REALPART (expr))
2170 && integer_zerop (TREE_IMAGPART (expr)))
2171 return 1;
2172
2173 if (TREE_CODE (expr) != INTEGER_CST)
2174 return 0;
2175
2176 return wi::popcount (expr) == 1;
2177 }
2178
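
/* Editor's note: a standalone illustration (not GCC code) of the popcount test
   in integer_pow2p above: a value is a power of two iff exactly one bit is
   set, which the classic x & (x - 1) trick expresses without needing a
   popcount primitive.  */

#include <assert.h>
#include <stdint.h>

static int
pow2p (uint64_t x)
{
  return x != 0 && (x & (x - 1)) == 0;   /* exactly one bit set */
}

int
main (void)
{
  assert (pow2p (1) && pow2p (64) && pow2p (UINT64_C (1) << 40));
  assert (!pow2p (0) && !pow2p (12));
  return 0;
}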
2179 /* Return 1 if EXPR is an integer constant other than zero or a
2180 complex constant other than zero. */
2181
2182 int
2183 integer_nonzerop (const_tree expr)
2184 {
2185 STRIP_NOPS (expr);
2186
2187 return ((TREE_CODE (expr) == INTEGER_CST
2188 && !wi::eq_p (expr, 0))
2189 || (TREE_CODE (expr) == COMPLEX_CST
2190 && (integer_nonzerop (TREE_REALPART (expr))
2191 || integer_nonzerop (TREE_IMAGPART (expr)))));
2192 }
2193
2194 /* Return 1 if EXPR is the fixed-point constant zero. */
2195
2196 int
2197 fixed_zerop (const_tree expr)
2198 {
2199 return (TREE_CODE (expr) == FIXED_CST
2200 && TREE_FIXED_CST (expr).data.is_zero ());
2201 }
2202
2203 /* Return the power of two represented by a tree node known to be a
2204 power of two. */
2205
2206 int
2207 tree_log2 (const_tree expr)
2208 {
2209 STRIP_NOPS (expr);
2210
2211 if (TREE_CODE (expr) == COMPLEX_CST)
2212 return tree_log2 (TREE_REALPART (expr));
2213
2214 return wi::exact_log2 (expr);
2215 }
2216
2217 /* Similar, but return the largest integer Y such that 2 ** Y is less
2218 than or equal to EXPR. */
2219
2220 int
2221 tree_floor_log2 (const_tree expr)
2222 {
2223 STRIP_NOPS (expr);
2224
2225 if (TREE_CODE (expr) == COMPLEX_CST)
2226 return tree_log2 (TREE_REALPART (expr));
2227
2228 return wi::floor_log2 (expr);
2229 }
2230
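
/* Editor's note: a standalone sketch (not GCC code) of the two queries above:
   floor_log2 returns the largest Y with 2**Y <= X, while exact_log2 returns
   that Y only when X is exactly a power of two and -1 otherwise, mirroring
   wi::floor_log2 and wi::exact_log2 on ordinary 64-bit integers.  */

#include <assert.h>
#include <stdint.h>

static int
floor_log2_u64 (uint64_t x)
{
  int y = -1;
  while (x)                    /* count how many times X can be halved */
    {
      x >>= 1;
      y++;
    }
  return y;                    /* -1 for X == 0, matching the convention */
}

static int
exact_log2_u64 (uint64_t x)
{
  return (x != 0 && (x & (x - 1)) == 0) ? floor_log2_u64 (x) : -1;
}

int
main (void)
{
  assert (floor_log2_u64 (1) == 0 && floor_log2_u64 (100) == 6);
  assert (exact_log2_u64 (64) == 6 && exact_log2_u64 (100) == -1);
  return 0;
}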
2231 /* Return number of known trailing zero bits in EXPR, or, if the value of
2232 EXPR is known to be zero, the precision of its type. */
2233
2234 unsigned int
2235 tree_ctz (const_tree expr)
2236 {
2237 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2238 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2239 return 0;
2240
2241 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2242 switch (TREE_CODE (expr))
2243 {
2244 case INTEGER_CST:
2245 ret1 = wi::ctz (expr);
2246 return MIN (ret1, prec);
2247 case SSA_NAME:
2248 ret1 = wi::ctz (get_nonzero_bits (expr));
2249 return MIN (ret1, prec);
2250 case PLUS_EXPR:
2251 case MINUS_EXPR:
2252 case BIT_IOR_EXPR:
2253 case BIT_XOR_EXPR:
2254 case MIN_EXPR:
2255 case MAX_EXPR:
2256 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2257 if (ret1 == 0)
2258 return ret1;
2259 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2260 return MIN (ret1, ret2);
2261 case POINTER_PLUS_EXPR:
2262 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2263 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2264 /* Second operand is sizetype, which could be in theory
2265 wider than pointer's precision. Make sure we never
2266 return more than prec. */
2267 ret2 = MIN (ret2, prec);
2268 return MIN (ret1, ret2);
2269 case BIT_AND_EXPR:
2270 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2271 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2272 return MAX (ret1, ret2);
2273 case MULT_EXPR:
2274 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2275 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2276 return MIN (ret1 + ret2, prec);
2277 case LSHIFT_EXPR:
2278 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2279 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2280 && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
2281 < (unsigned HOST_WIDE_INT) prec))
2282 {
2283 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2284 return MIN (ret1 + ret2, prec);
2285 }
2286 return ret1;
2287 case RSHIFT_EXPR:
2288 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2289 && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
2290 < (unsigned HOST_WIDE_INT) prec))
2291 {
2292 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2293 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2294 if (ret1 > ret2)
2295 return ret1 - ret2;
2296 }
2297 return 0;
2298 case TRUNC_DIV_EXPR:
2299 case CEIL_DIV_EXPR:
2300 case FLOOR_DIV_EXPR:
2301 case ROUND_DIV_EXPR:
2302 case EXACT_DIV_EXPR:
2303 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2304 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2305 {
2306 int l = tree_log2 (TREE_OPERAND (expr, 1));
2307 if (l >= 0)
2308 {
2309 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2310 ret2 = l;
2311 if (ret1 > ret2)
2312 return ret1 - ret2;
2313 }
2314 }
2315 return 0;
2316 CASE_CONVERT:
2317 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2318 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2319 ret1 = prec;
2320 return MIN (ret1, prec);
2321 case SAVE_EXPR:
2322 return tree_ctz (TREE_OPERAND (expr, 0));
2323 case COND_EXPR:
2324 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2325 if (ret1 == 0)
2326 return 0;
2327 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2328 return MIN (ret1, ret2);
2329 case COMPOUND_EXPR:
2330 return tree_ctz (TREE_OPERAND (expr, 1));
2331 case ADDR_EXPR:
2332 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2333 if (ret1 > BITS_PER_UNIT)
2334 {
2335 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2336 return MIN (ret1, prec);
2337 }
2338 return 0;
2339 default:
2340 return 0;
2341 }
2342 }
2343
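
/* Editor's note: a standalone sketch (not GCC code) checking, on concrete
   64-bit values, the propagation rules tree_ctz above relies on: addition and
   subtraction keep at least the minimum of the operands' trailing-zero counts,
   multiplication adds them (capped at the precision), a left shift by a
   constant adds the shift count, and a bitwise AND guarantees at least the
   maximum of the two counts.  */

#include <assert.h>
#include <stdint.h>

#define PREC 64

static unsigned
ctz_u64 (uint64_t x)
{
  unsigned n = 0;
  if (x == 0)
    return PREC;               /* a zero value has PREC known zero bits */
  while ((x & 1) == 0)
    {
      x >>= 1;
      n++;
    }
  return n;
}

static unsigned
min_u (unsigned a, unsigned b)
{
  return a < b ? a : b;
}

int
main (void)
{
  uint64_t a = 0x50, b = 0x300;               /* ctz 4 and ctz 8 */

  assert (ctz_u64 (a + b) >= min_u (ctz_u64 (a), ctz_u64 (b)));
  assert (ctz_u64 (a * b) >= ctz_u64 (a) + ctz_u64 (b));
  assert (ctz_u64 (a << 3) >= ctz_u64 (a) + 3);
  assert (ctz_u64 (a & b) >= (ctz_u64 (a) > ctz_u64 (b)
			      ? ctz_u64 (a) : ctz_u64 (b)));
  return 0;
}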
2344 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2345 decimal float constants, so don't return 1 for them. */
2346
2347 int
2348 real_zerop (const_tree expr)
2349 {
2350 STRIP_NOPS (expr);
2351
2352 switch (TREE_CODE (expr))
2353 {
2354 case REAL_CST:
2355 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2356 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2357 case COMPLEX_CST:
2358 return real_zerop (TREE_REALPART (expr))
2359 && real_zerop (TREE_IMAGPART (expr));
2360 case VECTOR_CST:
2361 {
2362 unsigned i;
2363 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2364 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2365 return false;
2366 return true;
2367 }
2368 default:
2369 return false;
2370 }
2371 }
2372
2373 /* Return 1 if EXPR is the real constant one in real or complex form.
2374 Trailing zeroes matter for decimal float constants, so don't return
2375 1 for them. */
2376
2377 int
2378 real_onep (const_tree expr)
2379 {
2380 STRIP_NOPS (expr);
2381
2382 switch (TREE_CODE (expr))
2383 {
2384 case REAL_CST:
2385 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2386 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2387 case COMPLEX_CST:
2388 return real_onep (TREE_REALPART (expr))
2389 && real_zerop (TREE_IMAGPART (expr));
2390 case VECTOR_CST:
2391 {
2392 unsigned i;
2393 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2394 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2395 return false;
2396 return true;
2397 }
2398 default:
2399 return false;
2400 }
2401 }
2402
2403 /* Return 1 if EXPR is the real constant two. Trailing zeroes matter
2404 for decimal float constants, so don't return 1 for them. */
2405
2406 int
2407 real_twop (const_tree expr)
2408 {
2409 STRIP_NOPS (expr);
2410
2411 switch (TREE_CODE (expr))
2412 {
2413 case REAL_CST:
2414 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2)
2415 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2416 case COMPLEX_CST:
2417 return real_twop (TREE_REALPART (expr))
2418 && real_zerop (TREE_IMAGPART (expr));
2419 case VECTOR_CST:
2420 {
2421 unsigned i;
2422 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2423 if (!real_twop (VECTOR_CST_ELT (expr, i)))
2424 return false;
2425 return true;
2426 }
2427 default:
2428 return false;
2429 }
2430 }
2431
2432 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2433 matter for decimal float constants, so don't return 1 for them. */
2434
2435 int
2436 real_minus_onep (const_tree expr)
2437 {
2438 STRIP_NOPS (expr);
2439
2440 switch (TREE_CODE (expr))
2441 {
2442 case REAL_CST:
2443 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2444 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2445 case COMPLEX_CST:
2446 return real_minus_onep (TREE_REALPART (expr))
2447 && real_zerop (TREE_IMAGPART (expr));
2448 case VECTOR_CST:
2449 {
2450 unsigned i;
2451 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2452 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2453 return false;
2454 return true;
2455 }
2456 default:
2457 return false;
2458 }
2459 }
2460
2461 /* Nonzero if EXP is a constant or a cast of a constant. */
2462
2463 int
2464 really_constant_p (const_tree exp)
2465 {
2466 /* This is not quite the same as STRIP_NOPS. It does more. */
2467 while (CONVERT_EXPR_P (exp)
2468 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2469 exp = TREE_OPERAND (exp, 0);
2470 return TREE_CONSTANT (exp);
2471 }
2472 \f
2473 /* Return first list element whose TREE_VALUE is ELEM.
2474 Return 0 if ELEM is not in LIST. */
2475
2476 tree
2477 value_member (tree elem, tree list)
2478 {
2479 while (list)
2480 {
2481 if (elem == TREE_VALUE (list))
2482 return list;
2483 list = TREE_CHAIN (list);
2484 }
2485 return NULL_TREE;
2486 }
2487
2488 /* Return first list element whose TREE_PURPOSE is ELEM.
2489 Return 0 if ELEM is not in LIST. */
2490
2491 tree
2492 purpose_member (const_tree elem, tree list)
2493 {
2494 while (list)
2495 {
2496 if (elem == TREE_PURPOSE (list))
2497 return list;
2498 list = TREE_CHAIN (list);
2499 }
2500 return NULL_TREE;
2501 }
2502
2503 /* Return true if ELEM is in V. */
2504
2505 bool
2506 vec_member (const_tree elem, vec<tree, va_gc> *v)
2507 {
2508 unsigned ix;
2509 tree t;
2510 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2511 if (elem == t)
2512 return true;
2513 return false;
2514 }
2515
2516 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2517 NULL_TREE. */
2518
2519 tree
2520 chain_index (int idx, tree chain)
2521 {
2522 for (; chain && idx > 0; --idx)
2523 chain = TREE_CHAIN (chain);
2524 return chain;
2525 }
2526
2527 /* Return nonzero if ELEM is part of the chain CHAIN. */
2528
2529 int
2530 chain_member (const_tree elem, const_tree chain)
2531 {
2532 while (chain)
2533 {
2534 if (elem == chain)
2535 return 1;
2536 chain = DECL_CHAIN (chain);
2537 }
2538
2539 return 0;
2540 }
2541
2542 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2543 We expect a null pointer to mark the end of the chain.
2544 This is the Lisp primitive `length'. */
2545
2546 int
2547 list_length (const_tree t)
2548 {
2549 const_tree p = t;
2550 #ifdef ENABLE_TREE_CHECKING
2551 const_tree q = t;
2552 #endif
2553 int len = 0;
2554
2555 while (p)
2556 {
2557 p = TREE_CHAIN (p);
2558 #ifdef ENABLE_TREE_CHECKING
2559 if (len % 2)
2560 q = TREE_CHAIN (q);
2561 gcc_assert (p != q);
2562 #endif
2563 len++;
2564 }
2565
2566 return len;
2567 }
2568
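
/* Editor's note: a standalone sketch (not GCC code) of the checked variant of
   list_length above: a second pointer Q advances half as fast as P, so if the
   chain is accidentally circular P eventually catches up with Q and the
   assertion fires instead of the walk looping forever.  */

#include <assert.h>
#include <stddef.h>

struct node { struct node *chain; };

static int
checked_length (const struct node *p)
{
  const struct node *q = p;
  int len = 0;

  while (p)
    {
      p = p->chain;
      if (len % 2)
	q = q->chain;          /* Q moves only every other step */
      assert (p != q);         /* fires if the chain contains a cycle */
      len++;
    }
  return len;
}

int
main (void)
{
  struct node c = { NULL }, b = { &c }, a = { &b };
  return checked_length (&a) == 3 ? 0 : 1;
}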
2569 /* Returns the number of FIELD_DECLs in TYPE. */
2570
2571 int
2572 fields_length (const_tree type)
2573 {
2574 tree t = TYPE_FIELDS (type);
2575 int count = 0;
2576
2577 for (; t; t = DECL_CHAIN (t))
2578 if (TREE_CODE (t) == FIELD_DECL)
2579 ++count;
2580
2581 return count;
2582 }
2583
2584 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2585 UNION_TYPE TYPE, or NULL_TREE if none. */
2586
2587 tree
2588 first_field (const_tree type)
2589 {
2590 tree t = TYPE_FIELDS (type);
2591 while (t && TREE_CODE (t) != FIELD_DECL)
2592 t = TREE_CHAIN (t);
2593 return t;
2594 }
2595
2596 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2597 by modifying the last node in chain 1 to point to chain 2.
2598 This is the Lisp primitive `nconc'. */
2599
2600 tree
2601 chainon (tree op1, tree op2)
2602 {
2603 tree t1;
2604
2605 if (!op1)
2606 return op2;
2607 if (!op2)
2608 return op1;
2609
2610 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2611 continue;
2612 TREE_CHAIN (t1) = op2;
2613
2614 #ifdef ENABLE_TREE_CHECKING
2615 {
2616 tree t2;
2617 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2618 gcc_assert (t2 != t1);
2619 }
2620 #endif
2621
2622 return op1;
2623 }
2624
2625 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2626
2627 tree
2628 tree_last (tree chain)
2629 {
2630 tree next;
2631 if (chain)
2632 while ((next = TREE_CHAIN (chain)))
2633 chain = next;
2634 return chain;
2635 }
2636
2637 /* Reverse the order of elements in the chain T,
2638 and return the new head of the chain (old last element). */
2639
2640 tree
2641 nreverse (tree t)
2642 {
2643 tree prev = 0, decl, next;
2644 for (decl = t; decl; decl = next)
2645 {
2646 /* We shouldn't be using this function to reverse BLOCK chains; we
2647 have blocks_nreverse for that. */
2648 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2649 next = TREE_CHAIN (decl);
2650 TREE_CHAIN (decl) = prev;
2651 prev = decl;
2652 }
2653 return prev;
2654 }
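
/* Editor's note: a standalone sketch (not GCC code) of the in-place pointer
   reversal performed by nreverse above: walk the chain once, flipping each
   node's chain pointer back to the previous node, and return the old tail as
   the new head.  */

#include <stdio.h>
#include <stddef.h>

struct node { int value; struct node *chain; };

static struct node *
reverse_chain (struct node *t)
{
  struct node *prev = NULL, *decl, *next;

  for (decl = t; decl; decl = next)
    {
      next = decl->chain;       /* remember the rest of the chain */
      decl->chain = prev;       /* point this node backwards */
      prev = decl;
    }
  return prev;                  /* old last element is the new head */
}

int
main (void)
{
  struct node c = { 3, NULL }, b = { 2, &c }, a = { 1, &b };
  struct node *p;

  for (p = reverse_chain (&a); p; p = p->chain)
    printf ("%d\n", p->value);  /* prints 3 2 1 */
  return 0;
}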
2655 \f
2656 /* Return a newly created TREE_LIST node whose
2657 purpose and value fields are PARM and VALUE. */
2658
2659 tree
2660 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2661 {
2662 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2663 TREE_PURPOSE (t) = parm;
2664 TREE_VALUE (t) = value;
2665 return t;
2666 }
2667
2668 /* Build a chain of TREE_LIST nodes from a vector. */
2669
2670 tree
2671 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2672 {
2673 tree ret = NULL_TREE;
2674 tree *pp = &ret;
2675 unsigned int i;
2676 tree t;
2677 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2678 {
2679 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2680 pp = &TREE_CHAIN (*pp);
2681 }
2682 return ret;
2683 }
2684
2685 /* Return a newly created TREE_LIST node whose
2686 purpose and value fields are PURPOSE and VALUE
2687 and whose TREE_CHAIN is CHAIN. */
2688
2689 tree
2690 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2691 {
2692 tree node;
2693
2694 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2695 memset (node, 0, sizeof (struct tree_common));
2696
2697 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2698
2699 TREE_SET_CODE (node, TREE_LIST);
2700 TREE_CHAIN (node) = chain;
2701 TREE_PURPOSE (node) = purpose;
2702 TREE_VALUE (node) = value;
2703 return node;
2704 }
2705
2706 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2707 trees. */
2708
2709 vec<tree, va_gc> *
2710 ctor_to_vec (tree ctor)
2711 {
2712 vec<tree, va_gc> *vec;
2713 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2714 unsigned int ix;
2715 tree val;
2716
2717 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2718 vec->quick_push (val);
2719
2720 return vec;
2721 }
2722 \f
2723 /* Return the size nominally occupied by an object of type TYPE
2724 when it resides in memory. The value is measured in units of bytes,
2725 and its data type is that normally used for type sizes
2726 (which is the first type created by make_signed_type or
2727 make_unsigned_type). */
2728
2729 tree
2730 size_in_bytes (const_tree type)
2731 {
2732 tree t;
2733
2734 if (type == error_mark_node)
2735 return integer_zero_node;
2736
2737 type = TYPE_MAIN_VARIANT (type);
2738 t = TYPE_SIZE_UNIT (type);
2739
2740 if (t == 0)
2741 {
2742 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2743 return size_zero_node;
2744 }
2745
2746 return t;
2747 }
2748
2749 /* Return the size of TYPE (in bytes) as a wide integer
2750 or return -1 if the size can vary or is larger than an integer. */
2751
2752 HOST_WIDE_INT
2753 int_size_in_bytes (const_tree type)
2754 {
2755 tree t;
2756
2757 if (type == error_mark_node)
2758 return 0;
2759
2760 type = TYPE_MAIN_VARIANT (type);
2761 t = TYPE_SIZE_UNIT (type);
2762
2763 if (t && cst_fits_uhwi_p (t))
2764 return tree_to_hwi (t);
2765 else
2766 return -1;
2767 }
2768
2769 /* Return the maximum size of TYPE (in bytes) as a wide integer
2770 or return -1 if the size can vary or is larger than an integer. */
2771
2772 HOST_WIDE_INT
2773 max_int_size_in_bytes (const_tree type)
2774 {
2775 HOST_WIDE_INT size = -1;
2776 tree size_tree;
2777
2778 /* If this is an array type, check for a possible MAX_SIZE attached. */
2779
2780 if (TREE_CODE (type) == ARRAY_TYPE)
2781 {
2782 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2783
2784 if (size_tree && tree_fits_uhwi_p (size_tree))
2785 size = tree_to_uhwi (size_tree);
2786 }
2787
2788 /* If we still haven't been able to get a size, see if the language
2789 can compute a maximum size. */
2790
2791 if (size == -1)
2792 {
2793 size_tree = lang_hooks.types.max_size (type);
2794
2795 if (size_tree && tree_fits_uhwi_p (size_tree))
2796 size = tree_to_uhwi (size_tree);
2797 }
2798
2799 return size;
2800 }
2801
2802 /* Returns a tree for the size of EXP in bytes. */
2803
2804 tree
2805 tree_expr_size (const_tree exp)
2806 {
2807 if (DECL_P (exp)
2808 && DECL_SIZE_UNIT (exp) != 0)
2809 return DECL_SIZE_UNIT (exp);
2810 else
2811 return size_in_bytes (TREE_TYPE (exp));
2812 }
2813 \f
2814 /* Return the bit position of FIELD, in bits from the start of the record.
2815 This is a tree of type bitsizetype. */
2816
2817 tree
2818 bit_position (const_tree field)
2819 {
2820 return bit_from_pos (DECL_FIELD_OFFSET (field),
2821 DECL_FIELD_BIT_OFFSET (field));
2822 }
2823
2824 /* Likewise, but return as an integer. It must be representable in
2825 that way (since it could be a signed value, we don't have the
2826 option of returning -1 like int_size_in_bytes can). */
2827
2828 HOST_WIDE_INT
2829 int_bit_position (const_tree field)
2830 {
2831 return tree_to_shwi (bit_position (field));
2832 }
2833 \f
2834 /* Return the byte position of FIELD, in bytes from the start of the record.
2835 This is a tree of type sizetype. */
2836
2837 tree
2838 byte_position (const_tree field)
2839 {
2840 return byte_from_pos (DECL_FIELD_OFFSET (field),
2841 DECL_FIELD_BIT_OFFSET (field));
2842 }
2843
2844 /* Likewise, but return as an integer. It must be representable in
2845 that way (since it could be a signed value, we don't have the
2846 option of returning -1 like int_size_in_bytes can). */
2847
2848 HOST_WIDE_INT
2849 int_byte_position (const_tree field)
2850 {
2851 return tree_to_shwi (byte_position (field));
2852 }
2853 \f
2854 /* Return the strictest alignment, in bits, that T is known to have. */
2855
2856 unsigned int
2857 expr_align (const_tree t)
2858 {
2859 unsigned int align0, align1;
2860
2861 switch (TREE_CODE (t))
2862 {
2863 CASE_CONVERT: case NON_LVALUE_EXPR:
2864 /* If we have conversions, we know that the alignment of the
2865 object must meet each of the alignments of the types. */
2866 align0 = expr_align (TREE_OPERAND (t, 0));
2867 align1 = TYPE_ALIGN (TREE_TYPE (t));
2868 return MAX (align0, align1);
2869
2870 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2871 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2872 case CLEANUP_POINT_EXPR:
2873 /* These don't change the alignment of an object. */
2874 return expr_align (TREE_OPERAND (t, 0));
2875
2876 case COND_EXPR:
2877 /* The best we can do is say that the alignment is the least aligned
2878 of the two arms. */
2879 align0 = expr_align (TREE_OPERAND (t, 1));
2880 align1 = expr_align (TREE_OPERAND (t, 2));
2881 return MIN (align0, align1);
2882
2883 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2884 meaningfully, it's always 1. */
2885 case LABEL_DECL: case CONST_DECL:
2886 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2887 case FUNCTION_DECL:
2888 gcc_assert (DECL_ALIGN (t) != 0);
2889 return DECL_ALIGN (t);
2890
2891 default:
2892 break;
2893 }
2894
2895 /* Otherwise take the alignment from that of the type. */
2896 return TYPE_ALIGN (TREE_TYPE (t));
2897 }
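
/* Editor's note: a small standalone illustration (not GCC code) of the two
   combining rules in expr_align above: a conversion lets us keep the stricter
   (larger) of the operand's and the type's alignment, while a conditional can
   only promise the weaker (smaller) alignment of its two arms.  The alignment
   values below are bits, chosen arbitrarily for the example.  */

#include <assert.h>

static unsigned
max_align (unsigned a, unsigned b)
{
  return a > b ? a : b;
}

static unsigned
min_align (unsigned a, unsigned b)
{
  return a < b ? a : b;
}

int
main (void)
{
  /* A 64-bit-aligned object seen through a conversion to a type that only
     requires 32-bit alignment is still known to be 64-bit aligned.  */
  assert (max_align (64, 32) == 64);

  /* cond ? (64-bit-aligned arm) : (32-bit-aligned arm): only 32 bits of
     alignment can be guaranteed.  */
  assert (min_align (64, 32) == 32);
  return 0;
}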
2898 \f
2899 /* Return, as a tree node, the number of elements for TYPE (which is an
2900 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2901
2902 tree
2903 array_type_nelts (const_tree type)
2904 {
2905 tree index_type, min, max;
2906
2907 /* If they did it with unspecified bounds, then we should have already
2908 given an error about it before we got here. */
2909 if (! TYPE_DOMAIN (type))
2910 return error_mark_node;
2911
2912 index_type = TYPE_DOMAIN (type);
2913 min = TYPE_MIN_VALUE (index_type);
2914 max = TYPE_MAX_VALUE (index_type);
2915
2916 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2917 if (!max)
2918 return error_mark_node;
2919
2920 return (integer_zerop (min)
2921 ? max
2922 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2923 }
2924 \f
2925 /* If arg is static -- a reference to an object in static storage -- then
2926 return the object. This is not the same as the C meaning of `static'.
2927 If arg isn't static, return NULL. */
2928
2929 tree
2930 staticp (tree arg)
2931 {
2932 switch (TREE_CODE (arg))
2933 {
2934 case FUNCTION_DECL:
2935 /* Nested functions are static, even though taking their address will
2936 involve a trampoline as we unnest the nested function and create
2937 the trampoline on the tree level. */
2938 return arg;
2939
2940 case VAR_DECL:
2941 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2942 && ! DECL_THREAD_LOCAL_P (arg)
2943 && ! DECL_DLLIMPORT_P (arg)
2944 ? arg : NULL);
2945
2946 case CONST_DECL:
2947 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2948 ? arg : NULL);
2949
2950 case CONSTRUCTOR:
2951 return TREE_STATIC (arg) ? arg : NULL;
2952
2953 case LABEL_DECL:
2954 case STRING_CST:
2955 return arg;
2956
2957 case COMPONENT_REF:
2958 /* If the thing being referenced is not a field, then it is
2959 something language specific. */
2960 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2961
2962 /* If we are referencing a bitfield, we can't evaluate an
2963 ADDR_EXPR at compile time and so it isn't a constant. */
2964 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2965 return NULL;
2966
2967 return staticp (TREE_OPERAND (arg, 0));
2968
2969 case BIT_FIELD_REF:
2970 return NULL;
2971
2972 case INDIRECT_REF:
2973 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2974
2975 case ARRAY_REF:
2976 case ARRAY_RANGE_REF:
2977 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2978 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2979 return staticp (TREE_OPERAND (arg, 0));
2980 else
2981 return NULL;
2982
2983 case COMPOUND_LITERAL_EXPR:
2984 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2985
2986 default:
2987 return NULL;
2988 }
2989 }
2990
2991 \f
2992
2993
2994 /* Return whether OP is a DECL whose address is function-invariant. */
2995
2996 bool
2997 decl_address_invariant_p (const_tree op)
2998 {
2999 /* The conditions below are slightly less strict than the one in
3000 staticp. */
3001
3002 switch (TREE_CODE (op))
3003 {
3004 case PARM_DECL:
3005 case RESULT_DECL:
3006 case LABEL_DECL:
3007 case FUNCTION_DECL:
3008 return true;
3009
3010 case VAR_DECL:
3011 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3012 || DECL_THREAD_LOCAL_P (op)
3013 || DECL_CONTEXT (op) == current_function_decl
3014 || decl_function_context (op) == current_function_decl)
3015 return true;
3016 break;
3017
3018 case CONST_DECL:
3019 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3020 || decl_function_context (op) == current_function_decl)
3021 return true;
3022 break;
3023
3024 default:
3025 break;
3026 }
3027
3028 return false;
3029 }
3030
3031 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3032
3033 bool
3034 decl_address_ip_invariant_p (const_tree op)
3035 {
3036 /* The conditions below are slightly less strict than the one in
3037 staticp. */
3038
3039 switch (TREE_CODE (op))
3040 {
3041 case LABEL_DECL:
3042 case FUNCTION_DECL:
3043 case STRING_CST:
3044 return true;
3045
3046 case VAR_DECL:
3047 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3048 && !DECL_DLLIMPORT_P (op))
3049 || DECL_THREAD_LOCAL_P (op))
3050 return true;
3051 break;
3052
3053 case CONST_DECL:
3054 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3055 return true;
3056 break;
3057
3058 default:
3059 break;
3060 }
3061
3062 return false;
3063 }
3064
3065
3066 /* Return true if T is function-invariant (internal function, does
3067 not handle arithmetic; that's handled in skip_simple_arithmetic and
3068 tree_invariant_p). */
3069
3070 static bool tree_invariant_p (tree t);
3071
3072 static bool
3073 tree_invariant_p_1 (tree t)
3074 {
3075 tree op;
3076
3077 if (TREE_CONSTANT (t)
3078 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3079 return true;
3080
3081 switch (TREE_CODE (t))
3082 {
3083 case SAVE_EXPR:
3084 return true;
3085
3086 case ADDR_EXPR:
3087 op = TREE_OPERAND (t, 0);
3088 while (handled_component_p (op))
3089 {
3090 switch (TREE_CODE (op))
3091 {
3092 case ARRAY_REF:
3093 case ARRAY_RANGE_REF:
3094 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3095 || TREE_OPERAND (op, 2) != NULL_TREE
3096 || TREE_OPERAND (op, 3) != NULL_TREE)
3097 return false;
3098 break;
3099
3100 case COMPONENT_REF:
3101 if (TREE_OPERAND (op, 2) != NULL_TREE)
3102 return false;
3103 break;
3104
3105 default:;
3106 }
3107 op = TREE_OPERAND (op, 0);
3108 }
3109
3110 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3111
3112 default:
3113 break;
3114 }
3115
3116 return false;
3117 }
3118
3119 /* Return true if T is function-invariant. */
3120
3121 static bool
3122 tree_invariant_p (tree t)
3123 {
3124 tree inner = skip_simple_arithmetic (t);
3125 return tree_invariant_p_1 (inner);
3126 }
3127
3128 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3129 Do this to any expression which may be used in more than one place,
3130 but must be evaluated only once.
3131
3132 Normally, expand_expr would reevaluate the expression each time.
3133 Calling save_expr produces something that is evaluated and recorded
3134 the first time expand_expr is called on it. Subsequent calls to
3135 expand_expr just reuse the recorded value.
3136
3137 The call to expand_expr that generates code that actually computes
3138 the value is the first call *at compile time*. Subsequent calls
3139 *at compile time* generate code to use the saved value.
3140 This produces a correct result provided that *at run time* control
3141 always flows through the insns made by the first expand_expr
3142 before reaching the other places where the save_expr was evaluated.
3143 You, the caller of save_expr, must make sure this is so.
3144
3145 Constants, and certain read-only nodes, are returned with no
3146 SAVE_EXPR because that is safe. Expressions containing placeholders
3147 are not touched; see tree.def for an explanation of what these
3148 are used for. */
3149
3150 tree
3151 save_expr (tree expr)
3152 {
3153 tree t = fold (expr);
3154 tree inner;
3155
3156 /* If the tree evaluates to a constant, then we don't want to hide that
3157 fact (i.e. this allows further folding, and direct checks for constants).
3158 However, a read-only object that has side effects cannot be bypassed.
3159 Since it is no problem to reevaluate literals, we just return the
3160 literal node. */
3161 inner = skip_simple_arithmetic (t);
3162 if (TREE_CODE (inner) == ERROR_MARK)
3163 return inner;
3164
3165 if (tree_invariant_p_1 (inner))
3166 return t;
3167
3168 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3169 it means that the size or offset of some field of an object depends on
3170 the value within another field.
3171
3172 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3173 and some variable since it would then need to be both evaluated once and
3174 evaluated more than once. Front-ends must assure this case cannot
3175 happen by surrounding any such subexpressions in their own SAVE_EXPR
3176 and forcing evaluation at the proper time. */
3177 if (contains_placeholder_p (inner))
3178 return t;
3179
3180 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3181 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3182
3183 /* This expression might be placed ahead of a jump to ensure that the
3184 value was computed on both sides of the jump. So make sure it isn't
3185 eliminated as dead. */
3186 TREE_SIDE_EFFECTS (t) = 1;
3187 return t;
3188 }
3189
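
/* Editor's note: a loose standalone analogy (not GCC code) for the semantics
   save_expr above gives an expression: the wrapped computation runs the first
   time its value is needed and every later use reuses the recorded result.
   The cached_int type and names are hypothetical illustration only.  */

#include <stdio.h>

struct cached_int
{
  int computed;            /* has the value been evaluated yet?  */
  int value;
  int (*eval) (void);      /* the expensive or side-effecting computation */
};

static int
force (struct cached_int *c)
{
  if (!c->computed)
    {
      c->value = c->eval ();   /* evaluated exactly once */
      c->computed = 1;
    }
  return c->value;
}

static int
expensive (void)
{
  puts ("evaluating");         /* printed only once */
  return 42;
}

int
main (void)
{
  struct cached_int e = { 0, 0, expensive };
  return force (&e) + force (&e) == 84 ? 0 : 1;
}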
3190 /* Look inside EXPR into any simple arithmetic operations. Return the
3191 outermost non-arithmetic or non-invariant node. */
3192
3193 tree
3194 skip_simple_arithmetic (tree expr)
3195 {
3196 /* We don't care about whether this can be used as an lvalue in this
3197 context. */
3198 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3199 expr = TREE_OPERAND (expr, 0);
3200
3201 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3202 a constant, it will be more efficient to not make another SAVE_EXPR since
3203 it will allow better simplification and GCSE will be able to merge the
3204 computations if they actually occur. */
3205 while (true)
3206 {
3207 if (UNARY_CLASS_P (expr))
3208 expr = TREE_OPERAND (expr, 0);
3209 else if (BINARY_CLASS_P (expr))
3210 {
3211 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3212 expr = TREE_OPERAND (expr, 0);
3213 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3214 expr = TREE_OPERAND (expr, 1);
3215 else
3216 break;
3217 }
3218 else
3219 break;
3220 }
3221
3222 return expr;
3223 }
3224
3225 /* Look inside EXPR into simple arithmetic operations involving constants.
3226 Return the outermost non-arithmetic or non-constant node. */
3227
3228 tree
3229 skip_simple_constant_arithmetic (tree expr)
3230 {
3231 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3232 expr = TREE_OPERAND (expr, 0);
3233
3234 while (true)
3235 {
3236 if (UNARY_CLASS_P (expr))
3237 expr = TREE_OPERAND (expr, 0);
3238 else if (BINARY_CLASS_P (expr))
3239 {
3240 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3241 expr = TREE_OPERAND (expr, 0);
3242 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3243 expr = TREE_OPERAND (expr, 1);
3244 else
3245 break;
3246 }
3247 else
3248 break;
3249 }
3250
3251 return expr;
3252 }
3253
3254 /* Return which tree structure is used by T. */
3255
3256 enum tree_node_structure_enum
3257 tree_node_structure (const_tree t)
3258 {
3259 const enum tree_code code = TREE_CODE (t);
3260 return tree_node_structure_for_code (code);
3261 }
3262
3263 /* Set various status flags when building a CALL_EXPR object T. */
3264
3265 static void
3266 process_call_operands (tree t)
3267 {
3268 bool side_effects = TREE_SIDE_EFFECTS (t);
3269 bool read_only = false;
3270 int i = call_expr_flags (t);
3271
3272 /* Calls have side-effects, except those to const or pure functions. */
3273 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3274 side_effects = true;
3275 /* Propagate TREE_READONLY of arguments for const functions. */
3276 if (i & ECF_CONST)
3277 read_only = true;
3278
3279 if (!side_effects || read_only)
3280 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3281 {
3282 tree op = TREE_OPERAND (t, i);
3283 if (op && TREE_SIDE_EFFECTS (op))
3284 side_effects = true;
3285 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3286 read_only = false;
3287 }
3288
3289 TREE_SIDE_EFFECTS (t) = side_effects;
3290 TREE_READONLY (t) = read_only;
3291 }
3292 \f
3293 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3294 size or offset that depends on a field within a record. */
3295
3296 bool
3297 contains_placeholder_p (const_tree exp)
3298 {
3299 enum tree_code code;
3300
3301 if (!exp)
3302 return 0;
3303
3304 code = TREE_CODE (exp);
3305 if (code == PLACEHOLDER_EXPR)
3306 return 1;
3307
3308 switch (TREE_CODE_CLASS (code))
3309 {
3310 case tcc_reference:
3311 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3312 position computations since they will be converted into a
3313 WITH_RECORD_EXPR involving the reference, which we assume
3314 here will be valid. */
3315 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3316
3317 case tcc_exceptional:
3318 if (code == TREE_LIST)
3319 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3320 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3321 break;
3322
3323 case tcc_unary:
3324 case tcc_binary:
3325 case tcc_comparison:
3326 case tcc_expression:
3327 switch (code)
3328 {
3329 case COMPOUND_EXPR:
3330 /* Ignoring the first operand isn't quite right, but works best. */
3331 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3332
3333 case COND_EXPR:
3334 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3335 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3336 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3337
3338 case SAVE_EXPR:
3339 /* The save_expr function never wraps anything containing
3340 a PLACEHOLDER_EXPR. */
3341 return 0;
3342
3343 default:
3344 break;
3345 }
3346
3347 switch (TREE_CODE_LENGTH (code))
3348 {
3349 case 1:
3350 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3351 case 2:
3352 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3353 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3354 default:
3355 return 0;
3356 }
3357
3358 case tcc_vl_exp:
3359 switch (code)
3360 {
3361 case CALL_EXPR:
3362 {
3363 const_tree arg;
3364 const_call_expr_arg_iterator iter;
3365 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3366 if (CONTAINS_PLACEHOLDER_P (arg))
3367 return 1;
3368 return 0;
3369 }
3370 default:
3371 return 0;
3372 }
3373
3374 default:
3375 return 0;
3376 }
3377 return 0;
3378 }
3379
3380 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3381 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3382 field positions. */
3383
3384 static bool
3385 type_contains_placeholder_1 (const_tree type)
3386 {
3387 /* If the size contains a placeholder or the parent type (component type
3388 in the case of arrays) involves a placeholder, this type does. */
3389 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3390 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3391 || (!POINTER_TYPE_P (type)
3392 && TREE_TYPE (type)
3393 && type_contains_placeholder_p (TREE_TYPE (type))))
3394 return true;
3395
3396 /* Now do type-specific checks. Note that the last part of the check above
3397 greatly limits what we have to do below. */
3398 switch (TREE_CODE (type))
3399 {
3400 case VOID_TYPE:
3401 case POINTER_BOUNDS_TYPE:
3402 case COMPLEX_TYPE:
3403 case ENUMERAL_TYPE:
3404 case BOOLEAN_TYPE:
3405 case POINTER_TYPE:
3406 case OFFSET_TYPE:
3407 case REFERENCE_TYPE:
3408 case METHOD_TYPE:
3409 case FUNCTION_TYPE:
3410 case VECTOR_TYPE:
3411 case NULLPTR_TYPE:
3412 return false;
3413
3414 case INTEGER_TYPE:
3415 case REAL_TYPE:
3416 case FIXED_POINT_TYPE:
3417 /* Here we just check the bounds. */
3418 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3419 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3420
3421 case ARRAY_TYPE:
3422 /* We have already checked the component type above, so just check the
3423 domain type. */
3424 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3425
3426 case RECORD_TYPE:
3427 case UNION_TYPE:
3428 case QUAL_UNION_TYPE:
3429 {
3430 tree field;
3431
3432 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3433 if (TREE_CODE (field) == FIELD_DECL
3434 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3435 || (TREE_CODE (type) == QUAL_UNION_TYPE
3436 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3437 || type_contains_placeholder_p (TREE_TYPE (field))))
3438 return true;
3439
3440 return false;
3441 }
3442
3443 default:
3444 gcc_unreachable ();
3445 }
3446 }
3447
3448 /* Wrapper around above function used to cache its result. */
3449
3450 bool
3451 type_contains_placeholder_p (tree type)
3452 {
3453 bool result;
3454
3455 /* If the contains_placeholder_bits field has been initialized,
3456 then we know the answer. */
3457 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3458 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3459
3460 /* Indicate that we've seen this type node, and the answer is false.
3461 This is what we want to return if we run into recursion via fields. */
3462 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3463
3464 /* Compute the real value. */
3465 result = type_contains_placeholder_1 (type);
3466
3467 /* Store the real value. */
3468 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3469
3470 return result;
3471 }
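
/* Editor's note: a standalone sketch (not GCC code) of the caching scheme used
   by type_contains_placeholder_p above: a small field stores 0 for "not yet
   computed" and otherwise the answer plus one, and the field is pre-set to
   "false" before the real computation so recursion through self-referential
   structures terminates.  The toy_type structure is hypothetical.  */

#include <assert.h>
#include <stddef.h>

struct toy_type
{
  unsigned cache;               /* 0 = unknown, 1 = false, 2 = true */
  int has_placeholder;          /* stands in for the real computation */
  struct toy_type *field_type;  /* possibly points back at this type */
};

static int
contains_placeholder (struct toy_type *t)
{
  int result;

  if (t->cache > 0)
    return t->cache - 1;        /* cached answer */

  t->cache = 1;                 /* seed "false" to break recursion */

  result = t->has_placeholder
	   || (t->field_type && contains_placeholder (t->field_type));

  t->cache = result + 1;        /* store the real answer */
  return result;
}

int
main (void)
{
  struct toy_type t = { 0, 0, NULL };
  t.field_type = &t;            /* self-referential, like a recursive record */
  assert (contains_placeholder (&t) == 0);
  return 0;
}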
3472 \f
3473 /* Push tree EXP onto vector QUEUE if it is not already present. */
3474
3475 static void
3476 push_without_duplicates (tree exp, vec<tree> *queue)
3477 {
3478 unsigned int i;
3479 tree iter;
3480
3481 FOR_EACH_VEC_ELT (*queue, i, iter)
3482 if (simple_cst_equal (iter, exp) == 1)
3483 break;
3484
3485 if (!iter)
3486 queue->safe_push (exp);
3487 }
3488
3489 /* Given a tree EXP, find all occurrences of references to fields
3490 in a PLACEHOLDER_EXPR and place them in vector REFS without
3491 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3492 we assume here that EXP contains only arithmetic expressions
3493 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3494 argument list. */
3495
3496 void
3497 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3498 {
3499 enum tree_code code = TREE_CODE (exp);
3500 tree inner;
3501 int i;
3502
3503 /* We handle TREE_LIST and COMPONENT_REF separately. */
3504 if (code == TREE_LIST)
3505 {
3506 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3507 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3508 }
3509 else if (code == COMPONENT_REF)
3510 {
3511 for (inner = TREE_OPERAND (exp, 0);
3512 REFERENCE_CLASS_P (inner);
3513 inner = TREE_OPERAND (inner, 0))
3514 ;
3515
3516 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3517 push_without_duplicates (exp, refs);
3518 else
3519 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3520 }
3521 else
3522 switch (TREE_CODE_CLASS (code))
3523 {
3524 case tcc_constant:
3525 break;
3526
3527 case tcc_declaration:
3528 /* Variables allocated to static storage can stay. */
3529 if (!TREE_STATIC (exp))
3530 push_without_duplicates (exp, refs);
3531 break;
3532
3533 case tcc_expression:
3534 /* This is the pattern built in ada/make_aligning_type. */
3535 if (code == ADDR_EXPR
3536 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3537 {
3538 push_without_duplicates (exp, refs);
3539 break;
3540 }
3541
3542 /* Fall through... */
3543
3544 case tcc_exceptional:
3545 case tcc_unary:
3546 case tcc_binary:
3547 case tcc_comparison:
3548 case tcc_reference:
3549 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3550 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3551 break;
3552
3553 case tcc_vl_exp:
3554 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3555 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3556 break;
3557
3558 default:
3559 gcc_unreachable ();
3560 }
3561 }
3562
3563 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3564 return a tree with all occurrences of references to F in a
3565 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3566 CONST_DECLs. Note that we assume here that EXP contains only
3567 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3568 occurring only in their argument list. */
3569
3570 tree
3571 substitute_in_expr (tree exp, tree f, tree r)
3572 {
3573 enum tree_code code = TREE_CODE (exp);
3574 tree op0, op1, op2, op3;
3575 tree new_tree;
3576
3577 /* We handle TREE_LIST and COMPONENT_REF separately. */
3578 if (code == TREE_LIST)
3579 {
3580 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3581 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3582 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3583 return exp;
3584
3585 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3586 }
3587 else if (code == COMPONENT_REF)
3588 {
3589 tree inner;
3590
3591 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3592 and it is the right field, replace it with R. */
3593 for (inner = TREE_OPERAND (exp, 0);
3594 REFERENCE_CLASS_P (inner);
3595 inner = TREE_OPERAND (inner, 0))
3596 ;
3597
3598 /* The field. */
3599 op1 = TREE_OPERAND (exp, 1);
3600
3601 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3602 return r;
3603
3604 /* If this expression hasn't been completed yet, leave it alone. */
3605 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3606 return exp;
3607
3608 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3609 if (op0 == TREE_OPERAND (exp, 0))
3610 return exp;
3611
3612 new_tree
3613 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3614 }
3615 else
3616 switch (TREE_CODE_CLASS (code))
3617 {
3618 case tcc_constant:
3619 return exp;
3620
3621 case tcc_declaration:
3622 if (exp == f)
3623 return r;
3624 else
3625 return exp;
3626
3627 case tcc_expression:
3628 if (exp == f)
3629 return r;
3630
3631 /* Fall through... */
3632
3633 case tcc_exceptional:
3634 case tcc_unary:
3635 case tcc_binary:
3636 case tcc_comparison:
3637 case tcc_reference:
3638 switch (TREE_CODE_LENGTH (code))
3639 {
3640 case 0:
3641 return exp;
3642
3643 case 1:
3644 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3645 if (op0 == TREE_OPERAND (exp, 0))
3646 return exp;
3647
3648 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3649 break;
3650
3651 case 2:
3652 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3653 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3654
3655 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3656 return exp;
3657
3658 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3659 break;
3660
3661 case 3:
3662 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3663 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3664 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3665
3666 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3667 && op2 == TREE_OPERAND (exp, 2))
3668 return exp;
3669
3670 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3671 break;
3672
3673 case 4:
3674 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3675 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3676 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3677 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3678
3679 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3680 && op2 == TREE_OPERAND (exp, 2)
3681 && op3 == TREE_OPERAND (exp, 3))
3682 return exp;
3683
3684 new_tree
3685 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3686 break;
3687
3688 default:
3689 gcc_unreachable ();
3690 }
3691 break;
3692
3693 case tcc_vl_exp:
3694 {
3695 int i;
3696
3697 new_tree = NULL_TREE;
3698
3699 /* If we are trying to replace F with a constant, inline back
3700 functions which do nothing else than computing a value from
3701 the arguments they are passed. This makes it possible to
3702 fold partially or entirely the replacement expression. */
3703 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3704 {
3705 tree t = maybe_inline_call_in_expr (exp);
3706 if (t)
3707 return SUBSTITUTE_IN_EXPR (t, f, r);
3708 }
3709
3710 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3711 {
3712 tree op = TREE_OPERAND (exp, i);
3713 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3714 if (new_op != op)
3715 {
3716 if (!new_tree)
3717 new_tree = copy_node (exp);
3718 TREE_OPERAND (new_tree, i) = new_op;
3719 }
3720 }
3721
3722 if (new_tree)
3723 {
3724 new_tree = fold (new_tree);
3725 if (TREE_CODE (new_tree) == CALL_EXPR)
3726 process_call_operands (new_tree);
3727 }
3728 else
3729 return exp;
3730 }
3731 break;
3732
3733 default:
3734 gcc_unreachable ();
3735 }
3736
3737 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3738
3739 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3740 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3741
3742 return new_tree;
3743 }
3744
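
/* Editor's note: a standalone sketch (not GCC code) of the rewriting
   discipline substitute_in_expr above follows: rebuild a node only if at
   least one operand actually changed, and otherwise hand back the original
   tree so unchanged subtrees stay shared.  The tiny expr structure and
   substitute function are hypothetical; the test leaks the rebuilt nodes,
   which is fine for a throwaway example.  */

#include <assert.h>
#include <stdlib.h>

struct expr { int leaf_value; struct expr *op0, *op1; };

/* Replace every leaf equal to FROM with a leaf of value TO.  Interior nodes
   are assumed to have both operands, leaves neither.  */

static struct expr *
substitute (struct expr *e, int from, int to)
{
  struct expr *op0, *op1, *r;

  if (!e->op0 && !e->op1)                /* a leaf */
    {
      if (e->leaf_value != from)
	return e;                        /* nothing to do: share the node */
      r = calloc (1, sizeof *r);
      r->leaf_value = to;
      return r;
    }

  op0 = substitute (e->op0, from, to);
  op1 = substitute (e->op1, from, to);
  if (op0 == e->op0 && op1 == e->op1)
    return e;                            /* no operand changed */

  r = calloc (1, sizeof *r);             /* rebuild only when needed */
  r->op0 = op0;
  r->op1 = op1;
  return r;
}

int
main (void)
{
  struct expr a = { 1, NULL, NULL }, b = { 2, NULL, NULL };
  struct expr sum = { 0, &a, &b };

  assert (substitute (&sum, 3, 9) == &sum);   /* untouched tree is shared */
  assert (substitute (&sum, 2, 9) != &sum);   /* changed tree is rebuilt */
  return 0;
}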
3745 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3746 for it within OBJ, a tree that is an object or a chain of references. */
3747
3748 tree
3749 substitute_placeholder_in_expr (tree exp, tree obj)
3750 {
3751 enum tree_code code = TREE_CODE (exp);
3752 tree op0, op1, op2, op3;
3753 tree new_tree;
3754
3755 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3756 in the chain of OBJ. */
3757 if (code == PLACEHOLDER_EXPR)
3758 {
3759 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3760 tree elt;
3761
3762 for (elt = obj; elt != 0;
3763 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3764 || TREE_CODE (elt) == COND_EXPR)
3765 ? TREE_OPERAND (elt, 1)
3766 : (REFERENCE_CLASS_P (elt)
3767 || UNARY_CLASS_P (elt)
3768 || BINARY_CLASS_P (elt)
3769 || VL_EXP_CLASS_P (elt)
3770 || EXPRESSION_CLASS_P (elt))
3771 ? TREE_OPERAND (elt, 0) : 0))
3772 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3773 return elt;
3774
3775 for (elt = obj; elt != 0;
3776 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3777 || TREE_CODE (elt) == COND_EXPR)
3778 ? TREE_OPERAND (elt, 1)
3779 : (REFERENCE_CLASS_P (elt)
3780 || UNARY_CLASS_P (elt)
3781 || BINARY_CLASS_P (elt)
3782 || VL_EXP_CLASS_P (elt)
3783 || EXPRESSION_CLASS_P (elt))
3784 ? TREE_OPERAND (elt, 0) : 0))
3785 if (POINTER_TYPE_P (TREE_TYPE (elt))
3786 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3787 == need_type))
3788 return fold_build1 (INDIRECT_REF, need_type, elt);
3789
3790 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3791 survives until RTL generation, there will be an error. */
3792 return exp;
3793 }
3794
3795 /* TREE_LIST is special because we need to look at TREE_VALUE
3796 and TREE_CHAIN, not TREE_OPERANDS. */
3797 else if (code == TREE_LIST)
3798 {
3799 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3800 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3801 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3802 return exp;
3803
3804 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3805 }
3806 else
3807 switch (TREE_CODE_CLASS (code))
3808 {
3809 case tcc_constant:
3810 case tcc_declaration:
3811 return exp;
3812
3813 case tcc_exceptional:
3814 case tcc_unary:
3815 case tcc_binary:
3816 case tcc_comparison:
3817 case tcc_expression:
3818 case tcc_reference:
3819 case tcc_statement:
3820 switch (TREE_CODE_LENGTH (code))
3821 {
3822 case 0:
3823 return exp;
3824
3825 case 1:
3826 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3827 if (op0 == TREE_OPERAND (exp, 0))
3828 return exp;
3829
3830 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3831 break;
3832
3833 case 2:
3834 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3835 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3836
3837 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3838 return exp;
3839
3840 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3841 break;
3842
3843 case 3:
3844 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3845 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3846 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3847
3848 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3849 && op2 == TREE_OPERAND (exp, 2))
3850 return exp;
3851
3852 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3853 break;
3854
3855 case 4:
3856 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3857 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3858 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3859 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3860
3861 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3862 && op2 == TREE_OPERAND (exp, 2)
3863 && op3 == TREE_OPERAND (exp, 3))
3864 return exp;
3865
3866 new_tree
3867 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3868 break;
3869
3870 default:
3871 gcc_unreachable ();
3872 }
3873 break;
3874
3875 case tcc_vl_exp:
3876 {
3877 int i;
3878
3879 new_tree = NULL_TREE;
3880
3881 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3882 {
3883 tree op = TREE_OPERAND (exp, i);
3884 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3885 if (new_op != op)
3886 {
3887 if (!new_tree)
3888 new_tree = copy_node (exp);
3889 TREE_OPERAND (new_tree, i) = new_op;
3890 }
3891 }
3892
3893 if (new_tree)
3894 {
3895 new_tree = fold (new_tree);
3896 if (TREE_CODE (new_tree) == CALL_EXPR)
3897 process_call_operands (new_tree);
3898 }
3899 else
3900 return exp;
3901 }
3902 break;
3903
3904 default:
3905 gcc_unreachable ();
3906 }
3907
3908 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3909
3910 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3911 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3912
3913 return new_tree;
3914 }
3915 \f
3916 /* Stabilize a reference so that we can use it any number of times
3917 without causing its operands to be evaluated more than once.
3918 Returns the stabilized reference. This works by means of save_expr,
3919 so see the caveats in the comments about save_expr.
3920
3921 Also allows conversion expressions whose operands are references.
3922 Any other kind of expression is returned unchanged. */
3923
3924 tree
3925 stabilize_reference (tree ref)
3926 {
3927 tree result;
3928 enum tree_code code = TREE_CODE (ref);
3929
3930 switch (code)
3931 {
3932 case VAR_DECL:
3933 case PARM_DECL:
3934 case RESULT_DECL:
3935 /* No action is needed in this case. */
3936 return ref;
3937
3938 CASE_CONVERT:
3939 case FLOAT_EXPR:
3940 case FIX_TRUNC_EXPR:
3941 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
3942 break;
3943
3944 case INDIRECT_REF:
3945 result = build_nt (INDIRECT_REF,
3946 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
3947 break;
3948
3949 case COMPONENT_REF:
3950 result = build_nt (COMPONENT_REF,
3951 stabilize_reference (TREE_OPERAND (ref, 0)),
3952 TREE_OPERAND (ref, 1), NULL_TREE);
3953 break;
3954
3955 case BIT_FIELD_REF:
3956 result = build_nt (BIT_FIELD_REF,
3957 stabilize_reference (TREE_OPERAND (ref, 0)),
3958 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3959 break;
3960
3961 case ARRAY_REF:
3962 result = build_nt (ARRAY_REF,
3963 stabilize_reference (TREE_OPERAND (ref, 0)),
3964 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3965 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3966 break;
3967
3968 case ARRAY_RANGE_REF:
3969 result = build_nt (ARRAY_RANGE_REF,
3970 stabilize_reference (TREE_OPERAND (ref, 0)),
3971 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3972 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3973 break;
3974
3975 case COMPOUND_EXPR:
3976 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3977 it wouldn't be ignored. This matters when dealing with
3978 volatiles. */
3979 return stabilize_reference_1 (ref);
3980
3981 /* If arg isn't a kind of lvalue we recognize, make no change.
3982 Caller should recognize the error for an invalid lvalue. */
3983 default:
3984 return ref;
3985
3986 case ERROR_MARK:
3987 return error_mark_node;
3988 }
3989
3990 TREE_TYPE (result) = TREE_TYPE (ref);
3991 TREE_READONLY (result) = TREE_READONLY (ref);
3992 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
3993 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3994
3995 return result;
3996 }
3997
3998 /* Subroutine of stabilize_reference; this is called for subtrees of
3999 references. Any expression with side-effects must be put in a SAVE_EXPR
4000 to ensure that it is only evaluated once.
4001
4002 We don't put SAVE_EXPR nodes around everything, because assigning very
4003 simple expressions to temporaries causes us to miss good opportunities
4004 for optimizations. Among other things, the opportunity to fold in the
4005 addition of a constant into an addressing mode often gets lost, e.g.
4006 "y[i+1] += x;". In general, we take the approach that we should not make
4007 an assignment unless we are forced into it - i.e., that any non-side effect
4008 operator should be allowed, and that cse should take care of coalescing
4009 multiple utterances of the same expression should that prove fruitful. */
4010
4011 tree
4012 stabilize_reference_1 (tree e)
4013 {
4014 tree result;
4015 enum tree_code code = TREE_CODE (e);
4016
4017 /* We cannot ignore const expressions because one might be a reference
4018 to a const array whose index contains side-effects. But we can
4019 ignore things that are actual constants or that have already been
4020 handled by this function. */
4021
4022 if (tree_invariant_p (e))
4023 return e;
4024
4025 switch (TREE_CODE_CLASS (code))
4026 {
4027 case tcc_exceptional:
4028 case tcc_type:
4029 case tcc_declaration:
4030 case tcc_comparison:
4031 case tcc_statement:
4032 case tcc_expression:
4033 case tcc_reference:
4034 case tcc_vl_exp:
4035 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4036 so that it will only be evaluated once. */
4037 /* The reference (r) and comparison (<) classes could be handled as
4038 below, but it is generally faster to only evaluate them once. */
4039 if (TREE_SIDE_EFFECTS (e))
4040 return save_expr (e);
4041 return e;
4042
4043 case tcc_constant:
4044 /* Constants need no processing. In fact, we should never reach
4045 here. */
4046 return e;
4047
4048 case tcc_binary:
4049 /* Division is slow and tends to be compiled with jumps,
4050 especially the division by powers of 2 that is often
4051 found inside of an array reference. So do it just once. */
4052 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4053 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4054 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4055 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4056 return save_expr (e);
4057 /* Recursively stabilize each operand. */
4058 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4059 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4060 break;
4061
4062 case tcc_unary:
4063 /* Recursively stabilize each operand. */
4064 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4065 break;
4066
4067 default:
4068 gcc_unreachable ();
4069 }
4070
4071 TREE_TYPE (result) = TREE_TYPE (e);
4072 TREE_READONLY (result) = TREE_READONLY (e);
4073 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4074 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4075
4076 return result;
4077 }
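
/* Illustrative sketch (not part of the GCC sources): for an lvalue such as
   a[i++], stabilize_reference returns a simple base unchanged (the default
   case above) while stabilize_reference_1 wraps the side-effecting index in
   a SAVE_EXPR, so the increment is evaluated only once.  The helper name is
   hypothetical and the code is guarded out of the build.  */
#if 0
static tree
example_stabilize_array_ref (tree array_ref)
{
  /* After this call the base is stabilized recursively and a
     side-effecting index such as i++ ends up wrapped in a SAVE_EXPR.  */
  return stabilize_reference (array_ref);
}
#endif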
4078 \f
4079 /* Low-level constructors for expressions. */
4080
4081 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4082 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4083
4084 void
4085 recompute_tree_invariant_for_addr_expr (tree t)
4086 {
4087 tree node;
4088 bool tc = true, se = false;
4089
4090 /* We started out assuming this address is both invariant and constant, but
4091 does not have side effects. Now go down any handled components and see if
4092 any of them involve offsets that are either non-constant or non-invariant.
4093 Also check for side-effects.
4094
4095 ??? Note that this code makes no attempt to deal with the case where
4096 taking the address of something causes a copy due to misalignment. */
4097
4098 #define UPDATE_FLAGS(NODE) \
4099 do { tree _node = (NODE); \
4100 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4101 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4102
4103 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4104 node = TREE_OPERAND (node, 0))
4105 {
4106 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4107 array reference (probably made temporarily by the G++ front end),
4108 so ignore all the operands. */
4109 if ((TREE_CODE (node) == ARRAY_REF
4110 || TREE_CODE (node) == ARRAY_RANGE_REF)
4111 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4112 {
4113 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4114 if (TREE_OPERAND (node, 2))
4115 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4116 if (TREE_OPERAND (node, 3))
4117 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4118 }
4119 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4120 FIELD_DECL, apparently. The G++ front end can put something else
4121 there, at least temporarily. */
4122 else if (TREE_CODE (node) == COMPONENT_REF
4123 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4124 {
4125 if (TREE_OPERAND (node, 2))
4126 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4127 }
4128 }
4129
4130 node = lang_hooks.expr_to_decl (node, &tc, &se);
4131
4132 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4133 the address, since &(*a)->b is a form of addition. If it's a constant, the
4134 address is constant too. If it's a decl, its address is constant if the
4135 decl is static. Everything else is not constant and, furthermore,
4136 taking the address of a volatile variable is not volatile. */
4137 if (TREE_CODE (node) == INDIRECT_REF
4138 || TREE_CODE (node) == MEM_REF)
4139 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4140 else if (CONSTANT_CLASS_P (node))
4141 ;
4142 else if (DECL_P (node))
4143 tc &= (staticp (node) != NULL_TREE);
4144 else
4145 {
4146 tc = false;
4147 se |= TREE_SIDE_EFFECTS (node);
4148 }
4149
4150
4151 TREE_CONSTANT (t) = tc;
4152 TREE_SIDE_EFFECTS (t) = se;
4153 #undef UPDATE_FLAGS
4154 }
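
/* Illustrative sketch (not part of the GCC sources): taking the address of
   a declaration with build1 runs recompute_tree_invariant_for_addr_expr
   automatically (see the ADDR_EXPR case in build1_stat below), so the
   TREE_CONSTANT flag directly reflects whether the decl is static.  The
   helper name is hypothetical; the code is guarded out of the build.  */
#if 0
static bool
example_address_is_constant (tree var)
{
  tree ptype = build_pointer_type (TREE_TYPE (var));
  tree addr = build1 (ADDR_EXPR, ptype, var);
  /* True when staticp (var) is non-null, as computed above.  */
  return TREE_CONSTANT (addr);
}
#endif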
4155
4156 /* Build an expression of code CODE, data type TYPE, and operands as
4157 specified. Expressions and reference nodes can be created this way.
4158 Constants, decls, types and misc nodes cannot be.
4159
4160 We define six non-variadic functions, taking from zero to five
4161 operands. This is enough for all extant tree codes. */
4162
4163 tree
4164 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4165 {
4166 tree t;
4167
4168 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4169
4170 t = make_node_stat (code PASS_MEM_STAT);
4171 TREE_TYPE (t) = tt;
4172
4173 return t;
4174 }
4175
4176 tree
4177 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4178 {
4179 int length = sizeof (struct tree_exp);
4180 tree t;
4181
4182 record_node_allocation_statistics (code, length);
4183
4184 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4185
4186 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4187
4188 memset (t, 0, sizeof (struct tree_common));
4189
4190 TREE_SET_CODE (t, code);
4191
4192 TREE_TYPE (t) = type;
4193 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4194 TREE_OPERAND (t, 0) = node;
4195 if (node && !TYPE_P (node))
4196 {
4197 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4198 TREE_READONLY (t) = TREE_READONLY (node);
4199 }
4200
4201 if (TREE_CODE_CLASS (code) == tcc_statement)
4202 TREE_SIDE_EFFECTS (t) = 1;
4203 else switch (code)
4204 {
4205 case VA_ARG_EXPR:
4206 /* All of these have side-effects, no matter what their
4207 operands are. */
4208 TREE_SIDE_EFFECTS (t) = 1;
4209 TREE_READONLY (t) = 0;
4210 break;
4211
4212 case INDIRECT_REF:
4213 /* Whether a dereference is readonly has nothing to do with whether
4214 its operand is readonly. */
4215 TREE_READONLY (t) = 0;
4216 break;
4217
4218 case ADDR_EXPR:
4219 if (node)
4220 recompute_tree_invariant_for_addr_expr (t);
4221 break;
4222
4223 default:
4224 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4225 && node && !TYPE_P (node)
4226 && TREE_CONSTANT (node))
4227 TREE_CONSTANT (t) = 1;
4228 if (TREE_CODE_CLASS (code) == tcc_reference
4229 && node && TREE_THIS_VOLATILE (node))
4230 TREE_THIS_VOLATILE (t) = 1;
4231 break;
4232 }
4233
4234 return t;
4235 }
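
/* Illustrative sketch (not part of the GCC sources): the default case above
   copies TREE_CONSTANT from a constant operand for unary codes, so negating
   a constant yields a tree that is itself marked constant.  The helper name
   is hypothetical; the code is guarded out of the build.  */
#if 0
static tree
example_negate (tree val)
{
  /* build1 is the _stat-less wrapper around build1_stat.  */
  return build1 (NEGATE_EXPR, TREE_TYPE (val), val);
}
#endif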
4236
4237 #define PROCESS_ARG(N) \
4238 do { \
4239 TREE_OPERAND (t, N) = arg##N; \
4240 if (arg##N &&!TYPE_P (arg##N)) \
4241 { \
4242 if (TREE_SIDE_EFFECTS (arg##N)) \
4243 side_effects = 1; \
4244 if (!TREE_READONLY (arg##N) \
4245 && !CONSTANT_CLASS_P (arg##N)) \
4246 (void) (read_only = 0); \
4247 if (!TREE_CONSTANT (arg##N)) \
4248 (void) (constant = 0); \
4249 } \
4250 } while (0)
4251
4252 tree
4253 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4254 {
4255 bool constant, read_only, side_effects;
4256 tree t;
4257
4258 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4259
4260 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4261 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4262 /* When sizetype precision doesn't match that of pointers
4263 we need to be able to build explicit extensions or truncations
4264 of the offset argument. */
4265 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4266 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4267 && TREE_CODE (arg1) == INTEGER_CST);
4268
4269 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4270 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4271 && ptrofftype_p (TREE_TYPE (arg1)));
4272
4273 t = make_node_stat (code PASS_MEM_STAT);
4274 TREE_TYPE (t) = tt;
4275
4276 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4277 result based on those same flags for the arguments. But if the
4278 arguments aren't really even `tree' expressions, we shouldn't be trying
4279 to do this. */
4280
4281 /* Expressions without side effects may be constant if their
4282 arguments are as well. */
4283 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4284 || TREE_CODE_CLASS (code) == tcc_binary);
4285 read_only = 1;
4286 side_effects = TREE_SIDE_EFFECTS (t);
4287
4288 PROCESS_ARG (0);
4289 PROCESS_ARG (1);
4290
4291 TREE_READONLY (t) = read_only;
4292 TREE_CONSTANT (t) = constant;
4293 TREE_SIDE_EFFECTS (t) = side_effects;
4294 TREE_THIS_VOLATILE (t)
4295 = (TREE_CODE_CLASS (code) == tcc_reference
4296 && arg0 && TREE_THIS_VOLATILE (arg0));
4297
4298 return t;
4299 }
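
/* Illustrative sketch (not part of the GCC sources): the assertions above
   require the offset operand of a POINTER_PLUS_EXPR to have a sizetype-like
   type (ptrofftype_p), so a raw byte offset is first turned into a sizetype
   constant.  PTR is assumed to be a pointer-valued tree; the helper name is
   hypothetical and the code is guarded out of the build.  */
#if 0
static tree
example_pointer_plus (tree ptr, HOST_WIDE_INT byte_offset)
{
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                 build_int_cst (sizetype, byte_offset));
}
#endif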
4300
4301
4302 tree
4303 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4304 tree arg2 MEM_STAT_DECL)
4305 {
4306 bool constant, read_only, side_effects;
4307 tree t;
4308
4309 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4310 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4311
4312 t = make_node_stat (code PASS_MEM_STAT);
4313 TREE_TYPE (t) = tt;
4314
4315 read_only = 1;
4316
4317 /* As a special exception, if COND_EXPR has NULL branches, we
4318 assume that it is a gimple statement and always consider
4319 it to have side effects. */
4320 if (code == COND_EXPR
4321 && tt == void_type_node
4322 && arg1 == NULL_TREE
4323 && arg2 == NULL_TREE)
4324 side_effects = true;
4325 else
4326 side_effects = TREE_SIDE_EFFECTS (t);
4327
4328 PROCESS_ARG (0);
4329 PROCESS_ARG (1);
4330 PROCESS_ARG (2);
4331
4332 if (code == COND_EXPR)
4333 TREE_READONLY (t) = read_only;
4334
4335 TREE_SIDE_EFFECTS (t) = side_effects;
4336 TREE_THIS_VOLATILE (t)
4337 = (TREE_CODE_CLASS (code) == tcc_reference
4338 && arg0 && TREE_THIS_VOLATILE (arg0));
4339
4340 return t;
4341 }
4342
4343 tree
4344 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4345 tree arg2, tree arg3 MEM_STAT_DECL)
4346 {
4347 bool constant, read_only, side_effects;
4348 tree t;
4349
4350 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4351
4352 t = make_node_stat (code PASS_MEM_STAT);
4353 TREE_TYPE (t) = tt;
4354
4355 side_effects = TREE_SIDE_EFFECTS (t);
4356
4357 PROCESS_ARG (0);
4358 PROCESS_ARG (1);
4359 PROCESS_ARG (2);
4360 PROCESS_ARG (3);
4361
4362 TREE_SIDE_EFFECTS (t) = side_effects;
4363 TREE_THIS_VOLATILE (t)
4364 = (TREE_CODE_CLASS (code) == tcc_reference
4365 && arg0 && TREE_THIS_VOLATILE (arg0));
4366
4367 return t;
4368 }
4369
4370 tree
4371 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4372 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4373 {
4374 bool constant, read_only, side_effects;
4375 tree t;
4376
4377 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4378
4379 t = make_node_stat (code PASS_MEM_STAT);
4380 TREE_TYPE (t) = tt;
4381
4382 side_effects = TREE_SIDE_EFFECTS (t);
4383
4384 PROCESS_ARG (0);
4385 PROCESS_ARG (1);
4386 PROCESS_ARG (2);
4387 PROCESS_ARG (3);
4388 PROCESS_ARG (4);
4389
4390 TREE_SIDE_EFFECTS (t) = side_effects;
4391 TREE_THIS_VOLATILE (t)
4392 = (TREE_CODE_CLASS (code) == tcc_reference
4393 && arg0 && TREE_THIS_VOLATILE (arg0));
4394
4395 return t;
4396 }
4397
4398 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4399 on the pointer PTR. */
4400
4401 tree
4402 build_simple_mem_ref_loc (location_t loc, tree ptr)
4403 {
4404 HOST_WIDE_INT offset = 0;
4405 tree ptype = TREE_TYPE (ptr);
4406 tree tem;
4407 /* For convenience allow addresses that collapse to a simple base
4408 and offset. */
4409 if (TREE_CODE (ptr) == ADDR_EXPR
4410 && (handled_component_p (TREE_OPERAND (ptr, 0))
4411 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4412 {
4413 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4414 gcc_assert (ptr);
4415 ptr = build_fold_addr_expr (ptr);
4416 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4417 }
4418 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4419 ptr, build_int_cst (ptype, offset));
4420 SET_EXPR_LOCATION (tem, loc);
4421 return tem;
4422 }
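
/* Illustrative sketch (not part of the GCC sources): dereferencing a
   pointer-valued tree.  The result is a MEM_REF of the pointed-to type at
   offset zero, i.e. the equivalent of *PTR.  The helper name is
   hypothetical; the code is guarded out of the build.  */
#if 0
static tree
example_dereference (tree ptr)
{
  return build_simple_mem_ref_loc (UNKNOWN_LOCATION, ptr);
}
#endif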
4423
4424 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4425
4426 offset_int
4427 mem_ref_offset (const_tree t)
4428 {
4429 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4430 }
4431
4432 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4433 offsetted by OFFSET units. */
4434
4435 tree
4436 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4437 {
4438 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4439 build_fold_addr_expr (base),
4440 build_int_cst (ptr_type_node, offset));
4441 tree addr = build1 (ADDR_EXPR, type, ref);
4442 recompute_tree_invariant_for_addr_expr (addr);
4443 return addr;
4444 }
4445
4446 /* Similar except don't specify the TREE_TYPE
4447 and leave the TREE_SIDE_EFFECTS as 0.
4448 It is permissible for arguments to be null,
4449 or even garbage if their values do not matter. */
4450
4451 tree
4452 build_nt (enum tree_code code, ...)
4453 {
4454 tree t;
4455 int length;
4456 int i;
4457 va_list p;
4458
4459 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4460
4461 va_start (p, code);
4462
4463 t = make_node (code);
4464 length = TREE_CODE_LENGTH (code);
4465
4466 for (i = 0; i < length; i++)
4467 TREE_OPERAND (t, i) = va_arg (p, tree);
4468
4469 va_end (p);
4470 return t;
4471 }
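
/* Illustrative sketch (not part of the GCC sources): build_nt leaves
   TREE_TYPE and TREE_SIDE_EFFECTS alone, which is exactly what
   stabilize_reference above relies on when it rebuilds references.  The
   helper name is hypothetical; the code is guarded out of the build.  */
#if 0
static tree
example_untyped_component_ref (tree object, tree field)
{
  /* Operand 2 (the offset override) is left as NULL_TREE; the caller is
     expected to fill in TREE_TYPE afterwards.  */
  return build_nt (COMPONENT_REF, object, field, NULL_TREE);
}
#endif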
4472
4473 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4474 tree vec. */
4475
4476 tree
4477 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4478 {
4479 tree ret, t;
4480 unsigned int ix;
4481
4482 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4483 CALL_EXPR_FN (ret) = fn;
4484 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4485 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4486 CALL_EXPR_ARG (ret, ix) = t;
4487 return ret;
4488 }
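
/* Illustrative sketch (not part of the GCC sources): building the argument
   vector for build_nt_call_vec with the vec_safe_* API, which allocates the
   GC vector on the first push.  The helper name is hypothetical; the code
   is guarded out of the build.  */
#if 0
static tree
example_untyped_call (tree fn, tree arg0, tree arg1)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg0);
  vec_safe_push (args, arg1);
  return build_nt_call_vec (fn, args);
}
#endif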
4489 \f
4490 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4491 We do NOT enter this node in any sort of symbol table.
4492
4493 LOC is the location of the decl.
4494
4495 layout_decl is used to set up the decl's storage layout.
4496 Other slots are initialized to 0 or null pointers. */
4497
4498 tree
4499 build_decl_stat (location_t loc, enum tree_code code, tree name,
4500 tree type MEM_STAT_DECL)
4501 {
4502 tree t;
4503
4504 t = make_node_stat (code PASS_MEM_STAT);
4505 DECL_SOURCE_LOCATION (t) = loc;
4506
4507 /* if (type == error_mark_node)
4508 type = integer_type_node; */
4509 /* That is not done, deliberately, so that having error_mark_node
4510 as the type can suppress useless errors in the use of this variable. */
4511
4512 DECL_NAME (t) = name;
4513 TREE_TYPE (t) = type;
4514
4515 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4516 layout_decl (t, 0);
4517
4518 return t;
4519 }
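
/* Illustrative sketch (not part of the GCC sources): creating an int
   VAR_DECL through the build_decl wrapper; because the code is VAR_DECL,
   layout_decl is run for us as described above.  The helper name is
   hypothetical; the code is guarded out of the build.  */
#if 0
static tree
example_make_int_var (location_t loc, const char *name)
{
  return build_decl (loc, VAR_DECL, get_identifier (name),
                     integer_type_node);
}
#endif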
4520
4521 /* Builds and returns a function declaration with NAME and TYPE. */
4522
4523 tree
4524 build_fn_decl (const char *name, tree type)
4525 {
4526 tree id = get_identifier (name);
4527 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4528
4529 DECL_EXTERNAL (decl) = 1;
4530 TREE_PUBLIC (decl) = 1;
4531 DECL_ARTIFICIAL (decl) = 1;
4532 TREE_NOTHROW (decl) = 1;
4533
4534 return decl;
4535 }
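
/* Illustrative sketch (not part of the GCC sources): declaring an external
   "void f (void)" function; the result is public, artificial and nothrow,
   exactly as set above.  The helper name is hypothetical; the code is
   guarded out of the build.  */
#if 0
static tree
example_declare_void_fn (const char *name)
{
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  return build_fn_decl (name, fntype);
}
#endif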
4536
4537 vec<tree, va_gc> *all_translation_units;
4538
4539 /* Builds a new translation-unit decl with name NAME, queues it in the
4540 global list of translation-unit decls and returns it. */
4541
4542 tree
4543 build_translation_unit_decl (tree name)
4544 {
4545 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4546 name, NULL_TREE);
4547 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4548 vec_safe_push (all_translation_units, tu);
4549 return tu;
4550 }
4551
4552 \f
4553 /* BLOCK nodes are used to represent the structure of binding contours
4554 and declarations, once those contours have been exited and their contents
4555 compiled. This information is used for outputting debugging info. */
4556
4557 tree
4558 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4559 {
4560 tree block = make_node (BLOCK);
4561
4562 BLOCK_VARS (block) = vars;
4563 BLOCK_SUBBLOCKS (block) = subblocks;
4564 BLOCK_SUPERCONTEXT (block) = supercontext;
4565 BLOCK_CHAIN (block) = chain;
4566 return block;
4567 }
4568
4569 \f
4570 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4571
4572 LOC is the location to use in tree T. */
4573
4574 void
4575 protected_set_expr_location (tree t, location_t loc)
4576 {
4577 if (t && CAN_HAVE_LOCATION_P (t))
4578 SET_EXPR_LOCATION (t, loc);
4579 }
4580 \f
4581 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4582 is ATTRIBUTE. */
4583
4584 tree
4585 build_decl_attribute_variant (tree ddecl, tree attribute)
4586 {
4587 DECL_ATTRIBUTES (ddecl) = attribute;
4588 return ddecl;
4589 }
4590
4591 /* Borrowed from hashtab.c iterative_hash implementation. */
4592 #define mix(a,b,c) \
4593 { \
4594 a -= b; a -= c; a ^= (c>>13); \
4595 b -= c; b -= a; b ^= (a<< 8); \
4596 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4597 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4598 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4599 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4600 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4601 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4602 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4603 }
4604
4605
4606 /* Produce good hash value combining VAL and VAL2. */
4607 hashval_t
4608 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4609 {
4610 /* the golden ratio; an arbitrary value. */
4611 hashval_t a = 0x9e3779b9;
4612
4613 mix (a, val, val2);
4614 return val2;
4615 }
4616
4617 /* Produce good hash value combining VAL and VAL2. */
4618 hashval_t
4619 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4620 {
4621 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4622 return iterative_hash_hashval_t (val, val2);
4623 else
4624 {
4625 hashval_t a = (hashval_t) val;
4626 /* Avoid warnings about shifting of more than the width of the type on
4627 hosts that won't execute this path. */
4628 int zero = 0;
4629 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4630 mix (a, b, val2);
4631 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4632 {
4633 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4634 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4635 mix (a, b, val2);
4636 }
4637 return val2;
4638 }
4639 }
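
/* Illustrative sketch (not part of the GCC sources): chaining the iterative
   hash routines to combine several values into one hash, the usual pattern
   used by the type hashing code below.  The helper name is hypothetical;
   the code is guarded out of the build.  */
#if 0
static hashval_t
example_hash_pair (HOST_WIDE_INT a, HOST_WIDE_INT b)
{
  hashval_t h = 0;
  h = iterative_hash_host_wide_int (a, h);
  h = iterative_hash_host_wide_int (b, h);
  return h;
}
#endif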
4640
4641 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4642 is ATTRIBUTE and its qualifiers are QUALS.
4643
4644 Record such modified types already made so we don't make duplicates. */
4645
4646 tree
4647 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4648 {
4649 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4650 {
4651 hashval_t hashcode = 0;
4652 tree ntype;
4653 int i;
4654 tree t;
4655 enum tree_code code = TREE_CODE (ttype);
4656
4657 /* Building a distinct copy of a tagged type is inappropriate; it
4658 causes breakage in code that expects there to be a one-to-one
4659 relationship between a struct and its fields.
4660 build_duplicate_type is another solution (as used in
4661 handle_transparent_union_attribute), but that doesn't play well
4662 with the stronger C++ type identity model. */
4663 if (TREE_CODE (ttype) == RECORD_TYPE
4664 || TREE_CODE (ttype) == UNION_TYPE
4665 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4666 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4667 {
4668 warning (OPT_Wattributes,
4669 "ignoring attributes applied to %qT after definition",
4670 TYPE_MAIN_VARIANT (ttype));
4671 return build_qualified_type (ttype, quals);
4672 }
4673
4674 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4675 ntype = build_distinct_type_copy (ttype);
4676
4677 TYPE_ATTRIBUTES (ntype) = attribute;
4678
4679 hashcode = iterative_hash_object (code, hashcode);
4680 if (TREE_TYPE (ntype))
4681 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4682 hashcode);
4683 hashcode = attribute_hash_list (attribute, hashcode);
4684
4685 switch (TREE_CODE (ntype))
4686 {
4687 case FUNCTION_TYPE:
4688 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4689 break;
4690 case ARRAY_TYPE:
4691 if (TYPE_DOMAIN (ntype))
4692 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4693 hashcode);
4694 break;
4695 case INTEGER_TYPE:
4696 t = TYPE_MAX_VALUE (ntype);
4697 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4698 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4699 break;
4700 case REAL_TYPE:
4701 case FIXED_POINT_TYPE:
4702 {
4703 unsigned int precision = TYPE_PRECISION (ntype);
4704 hashcode = iterative_hash_object (precision, hashcode);
4705 }
4706 break;
4707 default:
4708 break;
4709 }
4710
4711 ntype = type_hash_canon (hashcode, ntype);
4712
4713 /* If the target-dependent attributes make NTYPE different from
4714 its canonical type, we will need to use structural equality
4715 checks for this type. */
4716 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4717 || !comp_type_attributes (ntype, ttype))
4718 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4719 else if (TYPE_CANONICAL (ntype) == ntype)
4720 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4721
4722 ttype = build_qualified_type (ntype, quals);
4723 }
4724 else if (TYPE_QUALS (ttype) != quals)
4725 ttype = build_qualified_type (ttype, quals);
4726
4727 return ttype;
4728 }
4729
4730 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4731 the same. */
4732
4733 static bool
4734 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4735 {
4736 tree cl1, cl2;
4737 for (cl1 = clauses1, cl2 = clauses2;
4738 cl1 && cl2;
4739 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4740 {
4741 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4742 return false;
4743 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4744 {
4745 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4746 OMP_CLAUSE_DECL (cl2)) != 1)
4747 return false;
4748 }
4749 switch (OMP_CLAUSE_CODE (cl1))
4750 {
4751 case OMP_CLAUSE_ALIGNED:
4752 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4753 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4754 return false;
4755 break;
4756 case OMP_CLAUSE_LINEAR:
4757 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4758 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4759 return false;
4760 break;
4761 case OMP_CLAUSE_SIMDLEN:
4762 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4763 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4764 return false;
4765 default:
4766 break;
4767 }
4768 }
4769 return true;
4770 }
4771
4772 /* Remove duplicate "omp declare simd" attributes. */
4773
4774 void
4775 omp_remove_redundant_declare_simd_attrs (tree fndecl)
4776 {
4777 tree attr, end_attr = NULL_TREE, last_attr = NULL_TREE;
4778 for (attr = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (fndecl));
4779 attr;
4780 attr = lookup_attribute ("omp declare simd", TREE_CHAIN (attr)))
4781 {
4782 tree *pc;
4783 for (pc = &TREE_CHAIN (attr); *pc && *pc != end_attr; )
4784 {
4785 if (is_attribute_p ("omp declare simd", TREE_PURPOSE (*pc)))
4786 {
4787 last_attr = TREE_CHAIN (*pc);
4788 if (TREE_VALUE (attr) == NULL_TREE)
4789 {
4790 if (TREE_VALUE (*pc) == NULL_TREE)
4791 {
4792 *pc = TREE_CHAIN (*pc);
4793 continue;
4794 }
4795 }
4796 else if (TREE_VALUE (*pc) != NULL_TREE
4797 && omp_declare_simd_clauses_equal
4798 (TREE_VALUE (TREE_VALUE (*pc)),
4799 TREE_VALUE (TREE_VALUE (attr))))
4800 {
4801 *pc = TREE_CHAIN (*pc);
4802 continue;
4803 }
4804 }
4805 pc = &TREE_CHAIN (*pc);
4806 }
4807 end_attr = last_attr;
4808 }
4809 }
4810
4811 /* Compare two attributes for their value identity. Return true if the
4812 attribute values are known to be equal; otherwise return false. */
4814
4815 static bool
4816 attribute_value_equal (const_tree attr1, const_tree attr2)
4817 {
4818 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4819 return true;
4820
4821 if (TREE_VALUE (attr1) != NULL_TREE
4822 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4823 && TREE_VALUE (attr2) != NULL
4824 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4825 return (simple_cst_list_equal (TREE_VALUE (attr1),
4826 TREE_VALUE (attr2)) == 1);
4827
4828 if ((flag_openmp || flag_openmp_simd)
4829 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4830 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4831 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4832 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4833 TREE_VALUE (attr2));
4834
4835 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4836 }
4837
4838 /* Return 0 if the attributes for two types are incompatible, 1 if they
4839 are compatible, and 2 if they are nearly compatible (which causes a
4840 warning to be generated). */
4841 int
4842 comp_type_attributes (const_tree type1, const_tree type2)
4843 {
4844 const_tree a1 = TYPE_ATTRIBUTES (type1);
4845 const_tree a2 = TYPE_ATTRIBUTES (type2);
4846 const_tree a;
4847
4848 if (a1 == a2)
4849 return 1;
4850 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4851 {
4852 const struct attribute_spec *as;
4853 const_tree attr;
4854
4855 as = lookup_attribute_spec (get_attribute_name (a));
4856 if (!as || as->affects_type_identity == false)
4857 continue;
4858
4859 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4860 if (!attr || !attribute_value_equal (a, attr))
4861 break;
4862 }
4863 if (!a)
4864 {
4865 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4866 {
4867 const struct attribute_spec *as;
4868
4869 as = lookup_attribute_spec (get_attribute_name (a));
4870 if (!as || as->affects_type_identity == false)
4871 continue;
4872
4873 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4874 break;
4875 /* We don't need to compare trees again, as we did this
4876 already in the first loop. */
4877 }
4878 /* All identity-affecting attributes are equal, so there
4879 is no need to call the target hook for comparison. */
4880 if (!a)
4881 return 1;
4882 }
4883 /* As some type combinations - like default calling-convention - might
4884 be compatible, we have to call the target hook to get the final result. */
4885 return targetm.comp_type_attributes (type1, type2);
4886 }
4887
4888 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4889 is ATTRIBUTE.
4890
4891 Record such modified types already made so we don't make duplicates. */
4892
4893 tree
4894 build_type_attribute_variant (tree ttype, tree attribute)
4895 {
4896 return build_type_attribute_qual_variant (ttype, attribute,
4897 TYPE_QUALS (ttype));
4898 }
4899
4900
4901 /* Reset the expression *EXPR_P, a size or position.
4902
4903 ??? We could reset all non-constant sizes or positions. But it's cheap
4904 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4905
4906 We need to reset self-referential sizes or positions because they cannot
4907 be gimplified and thus can contain a CALL_EXPR after the gimplification
4908 is finished, which will run afoul of LTO streaming. And they need to be
4909 reset to something essentially dummy but not constant, so as to preserve
4910 the properties of the object they are attached to. */
4911
4912 static inline void
4913 free_lang_data_in_one_sizepos (tree *expr_p)
4914 {
4915 tree expr = *expr_p;
4916 if (CONTAINS_PLACEHOLDER_P (expr))
4917 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4918 }
4919
4920
4921 /* Reset all the fields in a binfo node BINFO. We only keep
4922 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4923
4924 static void
4925 free_lang_data_in_binfo (tree binfo)
4926 {
4927 unsigned i;
4928 tree t;
4929
4930 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4931
4932 BINFO_VIRTUALS (binfo) = NULL_TREE;
4933 BINFO_BASE_ACCESSES (binfo) = NULL;
4934 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4935 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4936
4937 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4938 free_lang_data_in_binfo (t);
4939 }
4940
4941
4942 /* Reset all language specific information still present in TYPE. */
4943
4944 static void
4945 free_lang_data_in_type (tree type)
4946 {
4947 gcc_assert (TYPE_P (type));
4948
4949 /* Give the FE a chance to remove its own data first. */
4950 lang_hooks.free_lang_data (type);
4951
4952 TREE_LANG_FLAG_0 (type) = 0;
4953 TREE_LANG_FLAG_1 (type) = 0;
4954 TREE_LANG_FLAG_2 (type) = 0;
4955 TREE_LANG_FLAG_3 (type) = 0;
4956 TREE_LANG_FLAG_4 (type) = 0;
4957 TREE_LANG_FLAG_5 (type) = 0;
4958 TREE_LANG_FLAG_6 (type) = 0;
4959
4960 if (TREE_CODE (type) == FUNCTION_TYPE)
4961 {
4962 /* Remove the const and volatile qualifiers from arguments. The
4963 C++ front end removes them, but the C front end does not,
4964 leading to false ODR violation errors when merging two
4965 instances of the same function signature compiled by
4966 different front ends. */
4967 tree p;
4968
4969 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4970 {
4971 tree arg_type = TREE_VALUE (p);
4972
4973 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4974 {
4975 int quals = TYPE_QUALS (arg_type)
4976 & ~TYPE_QUAL_CONST
4977 & ~TYPE_QUAL_VOLATILE;
4978 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4979 free_lang_data_in_type (TREE_VALUE (p));
4980 }
4981 }
4982 }
4983
4984 /* Remove members that are not actually FIELD_DECLs from the field
4985 list of an aggregate. These occur in C++. */
4986 if (RECORD_OR_UNION_TYPE_P (type))
4987 {
4988 tree prev, member;
4989
4990 /* Note that TYPE_FIELDS can be shared across distinct
4991 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4992 to be removed, we cannot set its TREE_CHAIN to NULL.
4993 Otherwise, we would not be able to find all the other fields
4994 in the other instances of this TREE_TYPE.
4995
4996 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4997 prev = NULL_TREE;
4998 member = TYPE_FIELDS (type);
4999 while (member)
5000 {
5001 if (TREE_CODE (member) == FIELD_DECL
5002 || TREE_CODE (member) == TYPE_DECL)
5003 {
5004 if (prev)
5005 TREE_CHAIN (prev) = member;
5006 else
5007 TYPE_FIELDS (type) = member;
5008 prev = member;
5009 }
5010
5011 member = TREE_CHAIN (member);
5012 }
5013
5014 if (prev)
5015 TREE_CHAIN (prev) = NULL_TREE;
5016 else
5017 TYPE_FIELDS (type) = NULL_TREE;
5018
5019 TYPE_METHODS (type) = NULL_TREE;
5020 if (TYPE_BINFO (type))
5021 free_lang_data_in_binfo (TYPE_BINFO (type));
5022 }
5023 else
5024 {
5025 /* For non-aggregate types, clear out the language slot (which
5026 overloads TYPE_BINFO). */
5027 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5028
5029 if (INTEGRAL_TYPE_P (type)
5030 || SCALAR_FLOAT_TYPE_P (type)
5031 || FIXED_POINT_TYPE_P (type))
5032 {
5033 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5034 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5035 }
5036 }
5037
5038 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5039 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5040
5041 if (TYPE_CONTEXT (type)
5042 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5043 {
5044 tree ctx = TYPE_CONTEXT (type);
5045 do
5046 {
5047 ctx = BLOCK_SUPERCONTEXT (ctx);
5048 }
5049 while (ctx && TREE_CODE (ctx) == BLOCK);
5050 TYPE_CONTEXT (type) = ctx;
5051 }
5052 }
5053
5054
5055 /* Return true if DECL may need an assembler name to be set. */
5056
5057 static inline bool
5058 need_assembler_name_p (tree decl)
5059 {
5060 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5061 if (TREE_CODE (decl) != FUNCTION_DECL
5062 && TREE_CODE (decl) != VAR_DECL)
5063 return false;
5064
5065 /* If DECL already has its assembler name set, it does not need a
5066 new one. */
5067 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5068 || DECL_ASSEMBLER_NAME_SET_P (decl))
5069 return false;
5070
5071 /* Abstract decls do not need an assembler name. */
5072 if (DECL_ABSTRACT (decl))
5073 return false;
5074
5075 /* For VAR_DECLs, only static, public and external symbols need an
5076 assembler name. */
5077 if (TREE_CODE (decl) == VAR_DECL
5078 && !TREE_STATIC (decl)
5079 && !TREE_PUBLIC (decl)
5080 && !DECL_EXTERNAL (decl))
5081 return false;
5082
5083 if (TREE_CODE (decl) == FUNCTION_DECL)
5084 {
5085 /* Do not set assembler name on builtins. Allow RTL expansion to
5086 decide whether to expand inline or via a regular call. */
5087 if (DECL_BUILT_IN (decl)
5088 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5089 return false;
5090
5091 /* Functions represented in the callgraph need an assembler name. */
5092 if (cgraph_get_node (decl) != NULL)
5093 return true;
5094
5095 /* Unused and not public functions don't need an assembler name. */
5096 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5097 return false;
5098 }
5099
5100 return true;
5101 }
5102
5103
5104 /* Reset all language specific information still present in symbol
5105 DECL. */
5106
5107 static void
5108 free_lang_data_in_decl (tree decl)
5109 {
5110 gcc_assert (DECL_P (decl));
5111
5112 /* Give the FE a chance to remove its own data first. */
5113 lang_hooks.free_lang_data (decl);
5114
5115 TREE_LANG_FLAG_0 (decl) = 0;
5116 TREE_LANG_FLAG_1 (decl) = 0;
5117 TREE_LANG_FLAG_2 (decl) = 0;
5118 TREE_LANG_FLAG_3 (decl) = 0;
5119 TREE_LANG_FLAG_4 (decl) = 0;
5120 TREE_LANG_FLAG_5 (decl) = 0;
5121 TREE_LANG_FLAG_6 (decl) = 0;
5122
5123 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5124 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5125 if (TREE_CODE (decl) == FIELD_DECL)
5126 {
5127 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5128 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5129 DECL_QUALIFIER (decl) = NULL_TREE;
5130 }
5131
5132 if (TREE_CODE (decl) == FUNCTION_DECL)
5133 {
5134 struct cgraph_node *node;
5135 if (!(node = cgraph_get_node (decl))
5136 || (!node->definition && !node->clones))
5137 {
5138 if (node)
5139 cgraph_release_function_body (node);
5140 else
5141 {
5142 release_function_body (decl);
5143 DECL_ARGUMENTS (decl) = NULL;
5144 DECL_RESULT (decl) = NULL;
5145 DECL_INITIAL (decl) = error_mark_node;
5146 }
5147 }
5148 if (gimple_has_body_p (decl))
5149 {
5150 tree t;
5151
5152 /* If DECL has a gimple body, then the context for its
5153 arguments must be DECL. Otherwise, it doesn't really
5154 matter, as we will not be emitting any code for DECL. In
5155 general, there may be other instances of DECL created by
5156 the front end and since PARM_DECLs are generally shared,
5157 their DECL_CONTEXT changes as the replicas of DECL are
5158 created. The only time where DECL_CONTEXT is important
5159 is for the FUNCTION_DECLs that have a gimple body (since
5160 the PARM_DECL will be used in the function's body). */
5161 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5162 DECL_CONTEXT (t) = decl;
5163 }
5164
5165 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5166 At this point, it is not needed anymore. */
5167 DECL_SAVED_TREE (decl) = NULL_TREE;
5168
5169 /* Clear the abstract origin if it refers to a method. Otherwise
5170 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5171 origin will not be output correctly. */
5172 if (DECL_ABSTRACT_ORIGIN (decl)
5173 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5174 && RECORD_OR_UNION_TYPE_P
5175 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5176 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5177
5178 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5179 DECL_VINDEX referring to itself into a vtable slot number as it
5180 should. This happens with functions that are copied and then forgotten
5181 about. Just clear it; it won't matter anymore. */
5182 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5183 DECL_VINDEX (decl) = NULL_TREE;
5184 }
5185 else if (TREE_CODE (decl) == VAR_DECL)
5186 {
5187 if ((DECL_EXTERNAL (decl)
5188 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5189 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5190 DECL_INITIAL (decl) = NULL_TREE;
5191 }
5192 else if (TREE_CODE (decl) == TYPE_DECL
5193 || TREE_CODE (decl) == FIELD_DECL)
5194 DECL_INITIAL (decl) = NULL_TREE;
5195 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5196 && DECL_INITIAL (decl)
5197 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5198 {
5199 /* Strip builtins from the translation-unit BLOCK. We still have targets
5200 without builtin_decl_explicit support; moreover, builtins are shared
5201 nodes, so we can't use TREE_CHAIN in multiple lists. */
5202 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5203 while (*nextp)
5204 {
5205 tree var = *nextp;
5206 if (TREE_CODE (var) == FUNCTION_DECL
5207 && DECL_BUILT_IN (var))
5208 *nextp = TREE_CHAIN (var);
5209 else
5210 nextp = &TREE_CHAIN (var);
5211 }
5212 }
5213 }
5214
5215
5216 /* Data used when collecting DECLs and TYPEs for language data removal. */
5217
5218 struct free_lang_data_d
5219 {
5220 /* Worklist to avoid excessive recursion. */
5221 vec<tree> worklist;
5222
5223 /* Set of traversed objects. Used to avoid duplicate visits. */
5224 struct pointer_set_t *pset;
5225
5226 /* Array of symbols to process with free_lang_data_in_decl. */
5227 vec<tree> decls;
5228
5229 /* Array of types to process with free_lang_data_in_type. */
5230 vec<tree> types;
5231 };
5232
5233
5234 /* Save all language fields needed to generate proper debug information
5235 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5236
5237 static void
5238 save_debug_info_for_decl (tree t)
5239 {
5240 /*struct saved_debug_info_d *sdi;*/
5241
5242 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5243
5244 /* FIXME. Partial implementation for saving debug info removed. */
5245 }
5246
5247
5248 /* Save all language fields needed to generate proper debug information
5249 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5250
5251 static void
5252 save_debug_info_for_type (tree t)
5253 {
5254 /*struct saved_debug_info_d *sdi;*/
5255
5256 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5257
5258 /* FIXME. Partial implementation for saving debug info removed. */
5259 }
5260
5261
5262 /* Add type or decl T to one of the list of tree nodes that need their
5263 language data removed. The lists are held inside FLD. */
5264
5265 static void
5266 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5267 {
5268 if (DECL_P (t))
5269 {
5270 fld->decls.safe_push (t);
5271 if (debug_info_level > DINFO_LEVEL_TERSE)
5272 save_debug_info_for_decl (t);
5273 }
5274 else if (TYPE_P (t))
5275 {
5276 fld->types.safe_push (t);
5277 if (debug_info_level > DINFO_LEVEL_TERSE)
5278 save_debug_info_for_type (t);
5279 }
5280 else
5281 gcc_unreachable ();
5282 }
5283
5284 /* Push tree node T into FLD->WORKLIST. */
5285
5286 static inline void
5287 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5288 {
5289 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5290 fld->worklist.safe_push ((t));
5291 }
5292
5293
5294 /* Operand callback helper for free_lang_data_in_node. *TP is the
5295 subtree operand being considered. */
5296
5297 static tree
5298 find_decls_types_r (tree *tp, int *ws, void *data)
5299 {
5300 tree t = *tp;
5301 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5302
5303 if (TREE_CODE (t) == TREE_LIST)
5304 return NULL_TREE;
5305
5306 /* Language specific nodes will be removed, so there is no need
5307 to gather anything under them. */
5308 if (is_lang_specific (t))
5309 {
5310 *ws = 0;
5311 return NULL_TREE;
5312 }
5313
5314 if (DECL_P (t))
5315 {
5316 /* Note that walk_tree does not traverse every possible field in
5317 decls, so we have to do our own traversals here. */
5318 add_tree_to_fld_list (t, fld);
5319
5320 fld_worklist_push (DECL_NAME (t), fld);
5321 fld_worklist_push (DECL_CONTEXT (t), fld);
5322 fld_worklist_push (DECL_SIZE (t), fld);
5323 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5324
5325 /* We are going to remove everything under DECL_INITIAL for
5326 TYPE_DECLs. No point walking them. */
5327 if (TREE_CODE (t) != TYPE_DECL)
5328 fld_worklist_push (DECL_INITIAL (t), fld);
5329
5330 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5331 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5332
5333 if (TREE_CODE (t) == FUNCTION_DECL)
5334 {
5335 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5336 fld_worklist_push (DECL_RESULT (t), fld);
5337 }
5338 else if (TREE_CODE (t) == TYPE_DECL)
5339 {
5340 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5341 fld_worklist_push (DECL_VINDEX (t), fld);
5342 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5343 }
5344 else if (TREE_CODE (t) == FIELD_DECL)
5345 {
5346 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5347 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5348 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5349 fld_worklist_push (DECL_FCONTEXT (t), fld);
5350 }
5351 else if (TREE_CODE (t) == VAR_DECL)
5352 {
5353 fld_worklist_push (DECL_SECTION_NAME (t), fld);
5354 fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
5355 }
5356
5357 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5358 && DECL_HAS_VALUE_EXPR_P (t))
5359 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5360
5361 if (TREE_CODE (t) != FIELD_DECL
5362 && TREE_CODE (t) != TYPE_DECL)
5363 fld_worklist_push (TREE_CHAIN (t), fld);
5364 *ws = 0;
5365 }
5366 else if (TYPE_P (t))
5367 {
5368 /* Note that walk_tree does not traverse every possible field in
5369 types, so we have to do our own traversals here. */
5370 add_tree_to_fld_list (t, fld);
5371
5372 if (!RECORD_OR_UNION_TYPE_P (t))
5373 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5374 fld_worklist_push (TYPE_SIZE (t), fld);
5375 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5376 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5377 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5378 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5379 fld_worklist_push (TYPE_NAME (t), fld);
5380 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5381 them and thus do not want to reach unused pointer types
5382 this way. */
5383 if (!POINTER_TYPE_P (t))
5384 fld_worklist_push (TYPE_MINVAL (t), fld);
5385 if (!RECORD_OR_UNION_TYPE_P (t))
5386 fld_worklist_push (TYPE_MAXVAL (t), fld);
5387 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5388 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5389 do not want to reach unused variants this way. */
5390 if (TYPE_CONTEXT (t))
5391 {
5392 tree ctx = TYPE_CONTEXT (t);
5393 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5394 So push that instead. */
5395 while (ctx && TREE_CODE (ctx) == BLOCK)
5396 ctx = BLOCK_SUPERCONTEXT (ctx);
5397 fld_worklist_push (ctx, fld);
5398 }
5399 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5400 want to reach unused types this way. */
5401
5402 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5403 {
5404 unsigned i;
5405 tree tem;
5406 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5407 fld_worklist_push (TREE_TYPE (tem), fld);
5408 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5409 if (tem
5410 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5411 && TREE_CODE (tem) == TREE_LIST)
5412 do
5413 {
5414 fld_worklist_push (TREE_VALUE (tem), fld);
5415 tem = TREE_CHAIN (tem);
5416 }
5417 while (tem);
5418 }
5419 if (RECORD_OR_UNION_TYPE_P (t))
5420 {
5421 tree tem;
5422 /* Push all TYPE_FIELDS - interesting and uninteresting entries
5423 can be interleaved. */
5424 tem = TYPE_FIELDS (t);
5425 while (tem)
5426 {
5427 if (TREE_CODE (tem) == FIELD_DECL
5428 || TREE_CODE (tem) == TYPE_DECL)
5429 fld_worklist_push (tem, fld);
5430 tem = TREE_CHAIN (tem);
5431 }
5432 }
5433
5434 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5435 *ws = 0;
5436 }
5437 else if (TREE_CODE (t) == BLOCK)
5438 {
5439 tree tem;
5440 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5441 fld_worklist_push (tem, fld);
5442 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5443 fld_worklist_push (tem, fld);
5444 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5445 }
5446
5447 if (TREE_CODE (t) != IDENTIFIER_NODE
5448 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5449 fld_worklist_push (TREE_TYPE (t), fld);
5450
5451 return NULL_TREE;
5452 }
5453
5454
5455 /* Find decls and types in T. */
5456
5457 static void
5458 find_decls_types (tree t, struct free_lang_data_d *fld)
5459 {
5460 while (1)
5461 {
5462 if (!pointer_set_contains (fld->pset, t))
5463 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5464 if (fld->worklist.is_empty ())
5465 break;
5466 t = fld->worklist.pop ();
5467 }
5468 }
5469
5470 /* Return a copy of LIST in which each type has been replaced by its
5471 corresponding runtime type. */
5472
5473 static tree
5474 get_eh_types_for_runtime (tree list)
5475 {
5476 tree head, prev;
5477
5478 if (list == NULL_TREE)
5479 return NULL_TREE;
5480
5481 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5482 prev = head;
5483 list = TREE_CHAIN (list);
5484 while (list)
5485 {
5486 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5487 TREE_CHAIN (prev) = n;
5488 prev = TREE_CHAIN (prev);
5489 list = TREE_CHAIN (list);
5490 }
5491
5492 return head;
5493 }
5494
5495
5496 /* Find decls and types referenced in EH region R and store them in
5497 FLD->DECLS and FLD->TYPES. */
5498
5499 static void
5500 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5501 {
5502 switch (r->type)
5503 {
5504 case ERT_CLEANUP:
5505 break;
5506
5507 case ERT_TRY:
5508 {
5509 eh_catch c;
5510
5511 /* The types referenced in each catch must first be changed to the
5512 EH types used at runtime. This removes references to FE types
5513 in the region. */
5514 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5515 {
5516 c->type_list = get_eh_types_for_runtime (c->type_list);
5517 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5518 }
5519 }
5520 break;
5521
5522 case ERT_ALLOWED_EXCEPTIONS:
5523 r->u.allowed.type_list
5524 = get_eh_types_for_runtime (r->u.allowed.type_list);
5525 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5526 break;
5527
5528 case ERT_MUST_NOT_THROW:
5529 walk_tree (&r->u.must_not_throw.failure_decl,
5530 find_decls_types_r, fld, fld->pset);
5531 break;
5532 }
5533 }
5534
5535
5536 /* Find decls and types referenced in cgraph node N and store them in
5537 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5538 look for *every* kind of DECL and TYPE node reachable from N,
5539 including those embedded inside types and decls (i.e., TYPE_DECLs,
5540 NAMESPACE_DECLs, etc.). */
5541
5542 static void
5543 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5544 {
5545 basic_block bb;
5546 struct function *fn;
5547 unsigned ix;
5548 tree t;
5549
5550 find_decls_types (n->decl, fld);
5551
5552 if (!gimple_has_body_p (n->decl))
5553 return;
5554
5555 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5556
5557 fn = DECL_STRUCT_FUNCTION (n->decl);
5558
5559 /* Traverse locals. */
5560 FOR_EACH_LOCAL_DECL (fn, ix, t)
5561 find_decls_types (t, fld);
5562
5563 /* Traverse EH regions in FN. */
5564 {
5565 eh_region r;
5566 FOR_ALL_EH_REGION_FN (r, fn)
5567 find_decls_types_in_eh_region (r, fld);
5568 }
5569
5570 /* Traverse every statement in FN. */
5571 FOR_EACH_BB_FN (bb, fn)
5572 {
5573 gimple_stmt_iterator si;
5574 unsigned i;
5575
5576 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5577 {
5578 gimple phi = gsi_stmt (si);
5579
5580 for (i = 0; i < gimple_phi_num_args (phi); i++)
5581 {
5582 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5583 find_decls_types (*arg_p, fld);
5584 }
5585 }
5586
5587 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5588 {
5589 gimple stmt = gsi_stmt (si);
5590
5591 if (is_gimple_call (stmt))
5592 find_decls_types (gimple_call_fntype (stmt), fld);
5593
5594 for (i = 0; i < gimple_num_ops (stmt); i++)
5595 {
5596 tree arg = gimple_op (stmt, i);
5597 find_decls_types (arg, fld);
5598 }
5599 }
5600 }
5601 }
5602
5603
5604 /* Find decls and types referenced in varpool node N and store them in
5605 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5606 look for *every* kind of DECL and TYPE node reachable from N,
5607 including those embedded inside types and decls (i.e., TYPE_DECLs,
5608 NAMESPACE_DECLs, etc.). */
5609
5610 static void
5611 find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
5612 {
5613 find_decls_types (v->decl, fld);
5614 }
5615
5616 /* If T needs an assembler name, have one created for it. */
5617
5618 void
5619 assign_assembler_name_if_neeeded (tree t)
5620 {
5621 if (need_assembler_name_p (t))
5622 {
5623 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5624 diagnostics that use input_location to show locus
5625 information. The problem here is that, at this point,
5626 input_location is generally anchored to the end of the file
5627 (since the parser is long gone), so we don't have a good
5628 position to pin it to.
5629
5630 To alleviate this problem, this uses the location of T's
5631 declaration. Examples of this are
5632 testsuite/g++.dg/template/cond2.C and
5633 testsuite/g++.dg/template/pr35240.C. */
5634 location_t saved_location = input_location;
5635 input_location = DECL_SOURCE_LOCATION (t);
5636
5637 decl_assembler_name (t);
5638
5639 input_location = saved_location;
5640 }
5641 }
5642
5643
5644 /* Free language specific information for every operand and expression
5645 in every node of the call graph. This process operates in three stages:
5646
5647 1- Every callgraph node and varpool node is traversed looking for
5648 decls and types embedded in them. This is a more exhaustive
5649 search than that done by find_referenced_vars, because it will
5650 also collect individual fields, decls embedded in types, etc.
5651
5652 2- All the decls found are sent to free_lang_data_in_decl.
5653
5654 3- All the types found are sent to free_lang_data_in_type.
5655
5656 The ordering between decls and types is important because
5657 free_lang_data_in_decl sets assembler names, which includes
5658 mangling. So types cannot be freed up until assembler names have
5659 been set up. */
5660
5661 static void
5662 free_lang_data_in_cgraph (void)
5663 {
5664 struct cgraph_node *n;
5665 struct varpool_node *v;
5666 struct free_lang_data_d fld;
5667 tree t;
5668 unsigned i;
5669 alias_pair *p;
5670
5671 /* Initialize sets and arrays to store referenced decls and types. */
5672 fld.pset = pointer_set_create ();
5673 fld.worklist.create (0);
5674 fld.decls.create (100);
5675 fld.types.create (100);
5676
5677 /* Find decls and types in the body of every function in the callgraph. */
5678 FOR_EACH_FUNCTION (n)
5679 find_decls_types_in_node (n, &fld);
5680
5681 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5682 find_decls_types (p->decl, &fld);
5683
5684 /* Find decls and types in every varpool symbol. */
5685 FOR_EACH_VARIABLE (v)
5686 find_decls_types_in_var (v, &fld);
5687
5688 /* Set the assembler name on every decl found. We need to do this
5689 now because free_lang_data_in_decl will invalidate data needed
5690 for mangling, which would break mangling of interdependent decls. */
5691 FOR_EACH_VEC_ELT (fld.decls, i, t)
5692 assign_assembler_name_if_neeeded (t);
5693
5694 /* Traverse every decl found freeing its language data. */
5695 FOR_EACH_VEC_ELT (fld.decls, i, t)
5696 free_lang_data_in_decl (t);
5697
5698 /* Traverse every type found freeing its language data. */
5699 FOR_EACH_VEC_ELT (fld.types, i, t)
5700 free_lang_data_in_type (t);
5701
5702 pointer_set_destroy (fld.pset);
5703 fld.worklist.release ();
5704 fld.decls.release ();
5705 fld.types.release ();
5706 }
5707
5708
5709 /* Free resources that are used by the front end but are not needed once it is done. */
5710
5711 static unsigned
5712 free_lang_data (void)
5713 {
5714 unsigned i;
5715
5716 /* If we are the LTO frontend we have freed lang-specific data already. */
5717 if (in_lto_p
5718 || !flag_generate_lto)
5719 return 0;
5720
5721 /* Allocate and assign alias sets to the standard integer types
5722 while the slots are still arranged the way the front ends generated them. */
5723 for (i = 0; i < itk_none; ++i)
5724 if (integer_types[i])
5725 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5726
5727 /* Traverse the IL resetting language specific information for
5728 operands, expressions, etc. */
5729 free_lang_data_in_cgraph ();
5730
5731 /* Create gimple variants for common types. */
5732 ptrdiff_type_node = integer_type_node;
5733 fileptr_type_node = ptr_type_node;
5734
5735 /* Reset some langhooks. Do not reset types_compatible_p, it may
5736 still be used indirectly via the get_alias_set langhook. */
5737 lang_hooks.dwarf_name = lhd_dwarf_name;
5738 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5739 /* We do not want the default decl_assembler_name implementation.
5740 Rather, once everything is fixed, we want a wrapper around it that
5741 asserts that all non-local symbols already got their assembler
5742 name and that only produces assembler names for local symbols.
5743 Better yet, we should make sure we never call decl_assembler_name
5744 on local symbols and devise a separate, middle-end private scheme for it. */
5745
5746 /* Reset diagnostic machinery. */
5747 tree_diagnostics_defaults (global_dc);
5748
5749 return 0;
5750 }
5751
5752
5753 namespace {
5754
5755 const pass_data pass_data_ipa_free_lang_data =
5756 {
5757 SIMPLE_IPA_PASS, /* type */
5758 "*free_lang_data", /* name */
5759 OPTGROUP_NONE, /* optinfo_flags */
5760 false, /* has_gate */
5761 true, /* has_execute */
5762 TV_IPA_FREE_LANG_DATA, /* tv_id */
5763 0, /* properties_required */
5764 0, /* properties_provided */
5765 0, /* properties_destroyed */
5766 0, /* todo_flags_start */
5767 0, /* todo_flags_finish */
5768 };
5769
5770 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5771 {
5772 public:
5773 pass_ipa_free_lang_data (gcc::context *ctxt)
5774 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5775 {}
5776
5777 /* opt_pass methods: */
5778 unsigned int execute () { return free_lang_data (); }
5779
5780 }; // class pass_ipa_free_lang_data
5781
5782 } // anon namespace
5783
5784 simple_ipa_opt_pass *
5785 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5786 {
5787 return new pass_ipa_free_lang_data (ctxt);
5788 }
5789
5790 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5791 ATTR_NAME. Also used internally by remove_attribute(). */
5792 bool
5793 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5794 {
5795 size_t ident_len = IDENTIFIER_LENGTH (ident);
5796
5797 if (ident_len == attr_len)
5798 {
5799 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5800 return true;
5801 }
5802 else if (ident_len == attr_len + 4)
5803 {
5804 /* There is the possibility that ATTR is 'text' and IDENT is
5805 '__text__'. */
5806 const char *p = IDENTIFIER_POINTER (ident);
5807 if (p[0] == '_' && p[1] == '_'
5808 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5809 && strncmp (attr_name, p + 2, attr_len) == 0)
5810 return true;
5811 }
5812
5813 return false;
5814 }
5815
5816 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5817 of ATTR_NAME, and LIST is not NULL_TREE. */
5818 tree
5819 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5820 {
5821 while (list)
5822 {
5823 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5824
5825 if (ident_len == attr_len)
5826 {
5827 if (!strcmp (attr_name,
5828 IDENTIFIER_POINTER (get_attribute_name (list))))
5829 break;
5830 }
5831 /* TODO: If we made sure that attributes were stored in the
5832 canonical form without '__...__' (i.e., as in 'text' as opposed
5833 to '__text__') then we could avoid the following case. */
5834 else if (ident_len == attr_len + 4)
5835 {
5836 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5837 if (p[0] == '_' && p[1] == '_'
5838 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5839 && strncmp (attr_name, p + 2, attr_len) == 0)
5840 break;
5841 }
5842 list = TREE_CHAIN (list);
5843 }
5844
5845 return list;
5846 }
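
/* Illustrative sketch (not part of the GCC sources): the usual caller-side
   pattern is the lookup_attribute wrapper from tree.h, which funnels into
   private_lookup_attribute above and therefore matches both the 'packed'
   and '__packed__' spellings on the attribute list.  The helper name is
   hypothetical; the code is guarded out of the build.  */
#if 0
static bool
example_type_is_packed (tree type)
{
  return lookup_attribute ("packed", TYPE_ATTRIBUTES (type)) != NULL_TREE;
}
#endif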
5847
5848 /* A variant of lookup_attribute() that can be used with an identifier
5849 as the first argument, and where the identifier can be either
5850 'text' or '__text__'.
5851
5852 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5853 return a pointer to the attribute's list element if the attribute
5854 is part of the list, or NULL_TREE if not found. If the attribute
5855 appears more than once, this only returns the first occurrence; the
5856 TREE_CHAIN of the return value should be passed back in if further
5857 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5858 can be in the form 'text' or '__text__'. */
5859 static tree
5860 lookup_ident_attribute (tree attr_identifier, tree list)
5861 {
5862 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5863
5864 while (list)
5865 {
5866 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5867 == IDENTIFIER_NODE);
5868
5869 /* Identifiers can be compared directly for equality. */
5870 if (attr_identifier == get_attribute_name (list))
5871 break;
5872
5873 /* If they are not equal, they may still be one in the form
5874 'text' while the other one is in the form '__text__'. TODO:
5875 If we were storing attributes in normalized 'text' form, then
5876 this could all go away and we could take full advantage of
5877 the fact that we're comparing identifiers. :-) */
5878 {
5879 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5880 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5881
5882 if (ident_len == attr_len + 4)
5883 {
5884 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5885 const char *q = IDENTIFIER_POINTER (attr_identifier);
5886 if (p[0] == '_' && p[1] == '_'
5887 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5888 && strncmp (q, p + 2, attr_len) == 0)
5889 break;
5890 }
5891 else if (ident_len + 4 == attr_len)
5892 {
5893 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5894 const char *q = IDENTIFIER_POINTER (attr_identifier);
5895 if (q[0] == '_' && q[1] == '_'
5896 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5897 && strncmp (q + 2, p, ident_len) == 0)
5898 break;
5899 }
5900 }
5901 list = TREE_CHAIN (list);
5902 }
5903
5904 return list;
5905 }
5906
5907 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5908 modified list. */
5909
5910 tree
5911 remove_attribute (const char *attr_name, tree list)
5912 {
5913 tree *p;
5914 size_t attr_len = strlen (attr_name);
5915
5916 gcc_checking_assert (attr_name[0] != '_');
5917
5918 for (p = &list; *p; )
5919 {
5920 tree l = *p;
5921 /* TODO: If we were storing attributes in normalized form, here
5922 we could use a simple strcmp(). */
5923 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5924 *p = TREE_CHAIN (l);
5925 else
5926 p = &TREE_CHAIN (l);
5927 }
5928
5929 return list;
5930 }
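/* Illustrative sketch, not part of the original file: remove_attribute
   expects the canonical spelling (no leading underscores) and strips
   every matching entry, whichever form it was stored in.  DECL is a
   hypothetical declaration.  */
static void
example_strip_deprecated (tree decl)
{
  DECL_ATTRIBUTES (decl)
    = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
}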
5931
5932 /* Return an attribute list that is the union of a1 and a2. */
5933
5934 tree
5935 merge_attributes (tree a1, tree a2)
5936 {
5937 tree attributes;
5938
5939 /* Either one unset? Take the set one. */
5940
5941 if ((attributes = a1) == 0)
5942 attributes = a2;
5943
5944 /* One that completely contains the other? Take it. */
5945
5946 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5947 {
5948 if (attribute_list_contained (a2, a1))
5949 attributes = a2;
5950 else
5951 {
5952 /* Pick the longest list, and hang the other list onto it. */
5953
5954 if (list_length (a1) < list_length (a2))
5955 attributes = a2, a2 = a1;
5956
5957 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5958 {
5959 tree a;
5960 for (a = lookup_ident_attribute (get_attribute_name (a2),
5961 attributes);
5962 a != NULL_TREE && !attribute_value_equal (a, a2);
5963 a = lookup_ident_attribute (get_attribute_name (a2),
5964 TREE_CHAIN (a)))
5965 ;
5966 if (a == NULL_TREE)
5967 {
5968 a1 = copy_node (a2);
5969 TREE_CHAIN (a1) = attributes;
5970 attributes = a1;
5971 }
5972 }
5973 }
5974 }
5975 return attributes;
5976 }
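/* Illustrative sketch, not part of the original file: merge_attributes
   yields the union of the two lists, keeping a single copy of entries
   whose names and values compare equal.  The attribute names below are
   only examples.  */
static tree
example_merge_attribute_lists (void)
{
  tree a1 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
  tree a2 = tree_cons (get_identifier ("packed"), NULL_TREE,
		       tree_cons (get_identifier ("used"), NULL_TREE,
				  NULL_TREE));
  /* The result contains "packed" once and "used" once.  */
  return merge_attributes (a1, a2);
}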
5977
5978 /* Given types T1 and T2, merge their attributes and return
5979 the result. */
5980
5981 tree
5982 merge_type_attributes (tree t1, tree t2)
5983 {
5984 return merge_attributes (TYPE_ATTRIBUTES (t1),
5985 TYPE_ATTRIBUTES (t2));
5986 }
5987
5988 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5989 the result. */
5990
5991 tree
5992 merge_decl_attributes (tree olddecl, tree newdecl)
5993 {
5994 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5995 DECL_ATTRIBUTES (newdecl));
5996 }
5997
5998 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5999
6000 /* Specialization of merge_decl_attributes for various Windows targets.
6001
6002 This handles the following situation:
6003
6004 __declspec (dllimport) int foo;
6005 int foo;
6006
6007 The second instance of `foo' nullifies the dllimport. */
6008
6009 tree
6010 merge_dllimport_decl_attributes (tree old, tree new_tree)
6011 {
6012 tree a;
6013 int delete_dllimport_p = 1;
6014
6015 /* What we need to do here is remove from `old' dllimport if it doesn't
6016 appear in `new'. dllimport behaves like extern: if a declaration is
6017 marked dllimport and a definition appears later, then the object
6018 is not dllimport'd. We also remove a `new' dllimport if the old list
6019 contains dllexport: dllexport always overrides dllimport, regardless
6020 of the order of declaration. */
6021 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6022 delete_dllimport_p = 0;
6023 else if (DECL_DLLIMPORT_P (new_tree)
6024 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6025 {
6026 DECL_DLLIMPORT_P (new_tree) = 0;
6027 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6028 "dllimport ignored", new_tree);
6029 }
6030 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6031 {
6032 /* Warn about overriding a symbol that has already been used, e.g.:
6033 extern int __attribute__ ((dllimport)) foo;
6034 int* bar () {return &foo;}
6035 int foo;
6036 */
6037 if (TREE_USED (old))
6038 {
6039 warning (0, "%q+D redeclared without dllimport attribute "
6040 "after being referenced with dll linkage", new_tree);
6041 /* If we have used a variable's address with dllimport linkage,
6042 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6043 decl may already have had TREE_CONSTANT computed.
6044 We still remove the attribute so that assembler code refers
6045 to '&foo' rather than '_imp__foo'. */
6046 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6047 DECL_DLLIMPORT_P (new_tree) = 1;
6048 }
6049
6050 /* Let an inline definition silently override the external reference,
6051 but otherwise warn about attribute inconsistency. */
6052 else if (TREE_CODE (new_tree) == VAR_DECL
6053 || !DECL_DECLARED_INLINE_P (new_tree))
6054 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6055 "previous dllimport ignored", new_tree);
6056 }
6057 else
6058 delete_dllimport_p = 0;
6059
6060 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6061
6062 if (delete_dllimport_p)
6063 a = remove_attribute ("dllimport", a);
6064
6065 return a;
6066 }
6067
6068 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6069 struct attribute_spec.handler. */
6070
6071 tree
6072 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6073 bool *no_add_attrs)
6074 {
6075 tree node = *pnode;
6076 bool is_dllimport;
6077
6078 /* These attributes may apply to structure and union types being created,
6079 but otherwise should pass to the declaration involved. */
6080 if (!DECL_P (node))
6081 {
6082 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6083 | (int) ATTR_FLAG_ARRAY_NEXT))
6084 {
6085 *no_add_attrs = true;
6086 return tree_cons (name, args, NULL_TREE);
6087 }
6088 if (TREE_CODE (node) == RECORD_TYPE
6089 || TREE_CODE (node) == UNION_TYPE)
6090 {
6091 node = TYPE_NAME (node);
6092 if (!node)
6093 return NULL_TREE;
6094 }
6095 else
6096 {
6097 warning (OPT_Wattributes, "%qE attribute ignored",
6098 name);
6099 *no_add_attrs = true;
6100 return NULL_TREE;
6101 }
6102 }
6103
6104 if (TREE_CODE (node) != FUNCTION_DECL
6105 && TREE_CODE (node) != VAR_DECL
6106 && TREE_CODE (node) != TYPE_DECL)
6107 {
6108 *no_add_attrs = true;
6109 warning (OPT_Wattributes, "%qE attribute ignored",
6110 name);
6111 return NULL_TREE;
6112 }
6113
6114 if (TREE_CODE (node) == TYPE_DECL
6115 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6116 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6117 {
6118 *no_add_attrs = true;
6119 warning (OPT_Wattributes, "%qE attribute ignored",
6120 name);
6121 return NULL_TREE;
6122 }
6123
6124 is_dllimport = is_attribute_p ("dllimport", name);
6125
6126 /* Report error on dllimport ambiguities seen now before they cause
6127 any damage. */
6128 if (is_dllimport)
6129 {
6130 /* Honor any target-specific overrides. */
6131 if (!targetm.valid_dllimport_attribute_p (node))
6132 *no_add_attrs = true;
6133
6134 else if (TREE_CODE (node) == FUNCTION_DECL
6135 && DECL_DECLARED_INLINE_P (node))
6136 {
6137 warning (OPT_Wattributes, "inline function %q+D declared as "
6138 " dllimport: attribute ignored", node);
6139 *no_add_attrs = true;
6140 }
6141 /* Like MS, treat definition of dllimported variables and
6142 non-inlined functions on declaration as syntax errors. */
6143 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6144 {
6145 error ("function %q+D definition is marked dllimport", node);
6146 *no_add_attrs = true;
6147 }
6148
6149 else if (TREE_CODE (node) == VAR_DECL)
6150 {
6151 if (DECL_INITIAL (node))
6152 {
6153 error ("variable %q+D definition is marked dllimport",
6154 node);
6155 *no_add_attrs = true;
6156 }
6157
6158 /* `extern' needn't be specified with dllimport.
6159 Specify `extern' now and hope for the best. Sigh. */
6160 DECL_EXTERNAL (node) = 1;
6161 /* Also, implicitly give global scope to dllimport'd variables
6162 declared within a function, unless they are declared static. */
6163 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6164 TREE_PUBLIC (node) = 1;
6165 }
6166
6167 if (*no_add_attrs == false)
6168 DECL_DLLIMPORT_P (node) = 1;
6169 }
6170 else if (TREE_CODE (node) == FUNCTION_DECL
6171 && DECL_DECLARED_INLINE_P (node)
6172 && flag_keep_inline_dllexport)
6173 /* An exported function, even if inline, must be emitted. */
6174 DECL_EXTERNAL (node) = 0;
6175
6176 /* Report error if symbol is not accessible at global scope. */
6177 if (!TREE_PUBLIC (node)
6178 && (TREE_CODE (node) == VAR_DECL
6179 || TREE_CODE (node) == FUNCTION_DECL))
6180 {
6181 error ("external linkage required for symbol %q+D because of "
6182 "%qE attribute", node, name);
6183 *no_add_attrs = true;
6184 }
6185
6186 /* A dllexport'd entity must have default visibility so that other
6187 program units (shared libraries or the main executable) can see
6188 it. A dllimport'd entity must have default visibility so that
6189 the linker knows that undefined references within this program
6190 unit can be resolved by the dynamic linker. */
6191 if (!*no_add_attrs)
6192 {
6193 if (DECL_VISIBILITY_SPECIFIED (node)
6194 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6195 error ("%qE implies default visibility, but %qD has already "
6196 "been declared with a different visibility",
6197 name, node);
6198 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6199 DECL_VISIBILITY_SPECIFIED (node) = 1;
6200 }
6201
6202 return NULL_TREE;
6203 }
6204
6205 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6206 \f
6207 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6208 of the various TYPE_QUAL values. */
6209
6210 static void
6211 set_type_quals (tree type, int type_quals)
6212 {
6213 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6214 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6215 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6216 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6217 }
6218
6219 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6220
6221 bool
6222 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6223 {
6224 return (TYPE_QUALS (cand) == type_quals
6225 && TYPE_NAME (cand) == TYPE_NAME (base)
6226 /* Apparently this is needed for Objective-C. */
6227 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6228 /* Check alignment. */
6229 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6230 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6231 TYPE_ATTRIBUTES (base)));
6232 }
6233
6234 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6235
6236 static bool
6237 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6238 {
6239 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6240 && TYPE_NAME (cand) == TYPE_NAME (base)
6241 /* Apparently this is needed for Objective-C. */
6242 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6243 /* Check alignment. */
6244 && TYPE_ALIGN (cand) == align
6245 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6246 TYPE_ATTRIBUTES (base)));
6247 }
6248
6249 /* Return a version of the TYPE, qualified as indicated by the
6250 TYPE_QUALS, if one exists. If no qualified version exists yet,
6251 return NULL_TREE. */
6252
6253 tree
6254 get_qualified_type (tree type, int type_quals)
6255 {
6256 tree t;
6257
6258 if (TYPE_QUALS (type) == type_quals)
6259 return type;
6260
6261 /* Search the chain of variants to see if there is already one there just
6262 like the one we need to have. If so, use that existing one. We must
6263 preserve the TYPE_NAME, since there is code that depends on this. */
6264 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6265 if (check_qualified_type (t, type, type_quals))
6266 return t;
6267
6268 return NULL_TREE;
6269 }
6270
6271 /* Like get_qualified_type, but creates the type if it does not
6272 exist. This function never returns NULL_TREE. */
6273
6274 tree
6275 build_qualified_type (tree type, int type_quals)
6276 {
6277 tree t;
6278
6279 /* See if we already have the appropriate qualified variant. */
6280 t = get_qualified_type (type, type_quals);
6281
6282 /* If not, build it. */
6283 if (!t)
6284 {
6285 t = build_variant_type_copy (type);
6286 set_type_quals (t, type_quals);
6287
6288 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6289 /* Propagate structural equality. */
6290 SET_TYPE_STRUCTURAL_EQUALITY (t);
6291 else if (TYPE_CANONICAL (type) != type)
6292 /* Build the underlying canonical type, since it is different
6293 from TYPE. */
6294 TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
6295 type_quals);
6296 else
6297 /* T is its own canonical type. */
6298 TYPE_CANONICAL (t) = t;
6299
6300 }
6301
6302 return t;
6303 }
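/* Illustrative sketch, not part of the original file: because
   get_qualified_type first walks the existing variant chain, asking for
   the same qualified variant twice yields the same node.  */
static void
example_const_int_variant (void)
{
  tree c1 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree c2 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  gcc_assert (c1 == c2);
}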
6304
6305 /* Create a variant of TYPE with alignment ALIGN. */
6306
6307 tree
6308 build_aligned_type (tree type, unsigned int align)
6309 {
6310 tree t;
6311
6312 if (TYPE_PACKED (type)
6313 || TYPE_ALIGN (type) == align)
6314 return type;
6315
6316 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6317 if (check_aligned_type (t, type, align))
6318 return t;
6319
6320 t = build_variant_type_copy (type);
6321 TYPE_ALIGN (t) = align;
6322
6323 return t;
6324 }
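/* Illustrative sketch, not part of the original file: ALIGN is given in
   bits, so a 16-byte over-aligned variant of int would be requested
   like this.  */
static tree
example_overaligned_int (void)
{
  return build_aligned_type (integer_type_node, 16 * BITS_PER_UNIT);
}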
6325
6326 /* Create a new distinct copy of TYPE. The new type is made its own
6327 MAIN_VARIANT. If TYPE requires structural equality checks, the
6328 resulting type requires structural equality checks; otherwise, its
6329 TYPE_CANONICAL points to itself. */
6330
6331 tree
6332 build_distinct_type_copy (tree type)
6333 {
6334 tree t = copy_node (type);
6335
6336 TYPE_POINTER_TO (t) = 0;
6337 TYPE_REFERENCE_TO (t) = 0;
6338
6339 /* Set the canonical type either to a new equivalence class, or
6340 propagate the need for structural equality checks. */
6341 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6342 SET_TYPE_STRUCTURAL_EQUALITY (t);
6343 else
6344 TYPE_CANONICAL (t) = t;
6345
6346 /* Make it its own variant. */
6347 TYPE_MAIN_VARIANT (t) = t;
6348 TYPE_NEXT_VARIANT (t) = 0;
6349
6350 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6351 whose TREE_TYPE is not t. This can also happen in the Ada
6352 frontend when using subtypes. */
6353
6354 return t;
6355 }
6356
6357 /* Create a new variant of TYPE, equivalent but distinct. This is so
6358 the caller can modify it. TYPE_CANONICAL for the return type will
6359 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6360 are considered equal by the language itself (or that both types
6361 require structural equality checks). */
6362
6363 tree
6364 build_variant_type_copy (tree type)
6365 {
6366 tree t, m = TYPE_MAIN_VARIANT (type);
6367
6368 t = build_distinct_type_copy (type);
6369
6370 /* Since we're building a variant, assume that it is a non-semantic
6371 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6372 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6373
6374 /* Add the new type to the chain of variants of TYPE. */
6375 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6376 TYPE_NEXT_VARIANT (m) = t;
6377 TYPE_MAIN_VARIANT (t) = m;
6378
6379 return t;
6380 }
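/* Illustrative sketch, not part of the original file, contrasting the
   two copies above: a distinct copy starts its own MAIN_VARIANT (and,
   normally, its own canonical type), while a variant copy stays on
   TYPE's variant chain and shares its TYPE_CANONICAL.  TYPE is a
   hypothetical input.  */
static void
example_type_copies (tree type)
{
  tree d = build_distinct_type_copy (type);
  tree v = build_variant_type_copy (type);

  gcc_assert (TYPE_MAIN_VARIANT (d) == d);
  gcc_assert (TYPE_MAIN_VARIANT (v) == TYPE_MAIN_VARIANT (type));
}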
6381 \f
6382 /* Return true if the "from" trees of both tree maps are equal. */
6383
6384 int
6385 tree_map_base_eq (const void *va, const void *vb)
6386 {
6387 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6388 *const b = (const struct tree_map_base *) vb;
6389 return (a->from == b->from);
6390 }
6391
6392 /* Hash the "from" tree of a tree_map_base. */
6393
6394 unsigned int
6395 tree_map_base_hash (const void *item)
6396 {
6397 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6398 }
6399
6400 /* Return true if this tree map structure is marked for garbage collection
6401 purposes. We simply return true if the from tree is marked, so that this
6402 structure goes away when the from tree goes away. */
6403
6404 int
6405 tree_map_base_marked_p (const void *p)
6406 {
6407 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6408 }
6409
6410 /* Hash a from tree in a tree_map. */
6411
6412 unsigned int
6413 tree_map_hash (const void *item)
6414 {
6415 return (((const struct tree_map *) item)->hash);
6416 }
6417
6418 /* Hash a from tree in a tree_decl_map. */
6419
6420 unsigned int
6421 tree_decl_map_hash (const void *item)
6422 {
6423 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6424 }
6425
6426 /* Return the initialization priority for DECL. */
6427
6428 priority_type
6429 decl_init_priority_lookup (tree decl)
6430 {
6431 struct tree_priority_map *h;
6432 struct tree_map_base in;
6433
6434 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6435 in.from = decl;
6436 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6437 return h ? h->init : DEFAULT_INIT_PRIORITY;
6438 }
6439
6440 /* Return the finalization priority for DECL. */
6441
6442 priority_type
6443 decl_fini_priority_lookup (tree decl)
6444 {
6445 struct tree_priority_map *h;
6446 struct tree_map_base in;
6447
6448 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6449 in.from = decl;
6450 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6451 return h ? h->fini : DEFAULT_INIT_PRIORITY;
6452 }
6453
6454 /* Return the initialization and finalization priority information for
6455 DECL. If there is no previous priority information, a freshly
6456 allocated structure is returned. */
6457
6458 static struct tree_priority_map *
6459 decl_priority_info (tree decl)
6460 {
6461 struct tree_priority_map in;
6462 struct tree_priority_map *h;
6463 void **loc;
6464
6465 in.base.from = decl;
6466 loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
6467 h = (struct tree_priority_map *) *loc;
6468 if (!h)
6469 {
6470 h = ggc_alloc_cleared_tree_priority_map ();
6471 *loc = h;
6472 h->base.from = decl;
6473 h->init = DEFAULT_INIT_PRIORITY;
6474 h->fini = DEFAULT_INIT_PRIORITY;
6475 }
6476
6477 return h;
6478 }
6479
6480 /* Set the initialization priority for DECL to PRIORITY. */
6481
6482 void
6483 decl_init_priority_insert (tree decl, priority_type priority)
6484 {
6485 struct tree_priority_map *h;
6486
6487 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6488 if (priority == DEFAULT_INIT_PRIORITY)
6489 return;
6490 h = decl_priority_info (decl);
6491 h->init = priority;
6492 }
6493
6494 /* Set the finalization priority for DECL to PRIORITY. */
6495
6496 void
6497 decl_fini_priority_insert (tree decl, priority_type priority)
6498 {
6499 struct tree_priority_map *h;
6500
6501 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6502 if (priority == DEFAULT_INIT_PRIORITY)
6503 return;
6504 h = decl_priority_info (decl);
6505 h->fini = priority;
6506 }
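/* Illustrative sketch, not part of the original file: record a
   non-default constructor priority for a hypothetical FUNCTION_DECL and
   read it back.  Priorities equal to DEFAULT_INIT_PRIORITY are never
   stored.  */
static void
example_set_ctor_priority (tree fndecl)
{
  decl_init_priority_insert (fndecl, 101);
  gcc_assert (decl_init_priority_lookup (fndecl) == 101);
}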
6507
6508 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6509
6510 static void
6511 print_debug_expr_statistics (void)
6512 {
6513 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6514 (long) htab_size (debug_expr_for_decl),
6515 (long) htab_elements (debug_expr_for_decl),
6516 htab_collisions (debug_expr_for_decl));
6517 }
6518
6519 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6520
6521 static void
6522 print_value_expr_statistics (void)
6523 {
6524 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6525 (long) htab_size (value_expr_for_decl),
6526 (long) htab_elements (value_expr_for_decl),
6527 htab_collisions (value_expr_for_decl));
6528 }
6529
6530 /* Lookup a debug expression for FROM, and return it if we find one. */
6531
6532 tree
6533 decl_debug_expr_lookup (tree from)
6534 {
6535 struct tree_decl_map *h, in;
6536 in.base.from = from;
6537
6538 h = (struct tree_decl_map *)
6539 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6540 if (h)
6541 return h->to;
6542 return NULL_TREE;
6543 }
6544
6545 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6546
6547 void
6548 decl_debug_expr_insert (tree from, tree to)
6549 {
6550 struct tree_decl_map *h;
6551 void **loc;
6552
6553 h = ggc_alloc_tree_decl_map ();
6554 h->base.from = from;
6555 h->to = to;
6556 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6557 INSERT);
6558 *(struct tree_decl_map **) loc = h;
6559 }
6560
6561 /* Lookup a value expression for FROM, and return it if we find one. */
6562
6563 tree
6564 decl_value_expr_lookup (tree from)
6565 {
6566 struct tree_decl_map *h, in;
6567 in.base.from = from;
6568
6569 h = (struct tree_decl_map *)
6570 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6571 if (h)
6572 return h->to;
6573 return NULL_TREE;
6574 }
6575
6576 /* Insert a mapping FROM->TO in the value expression hashtable. */
6577
6578 void
6579 decl_value_expr_insert (tree from, tree to)
6580 {
6581 struct tree_decl_map *h;
6582 void **loc;
6583
6584 h = ggc_alloc_tree_decl_map ();
6585 h->base.from = from;
6586 h->to = to;
6587 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6588 INSERT);
6589 *(struct tree_decl_map **) loc = h;
6590 }
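/* Illustrative sketch, not part of the original file: the
   SET_DECL_VALUE_EXPR / DECL_VALUE_EXPR macros in tree.h are thin
   wrappers around the two routines above; a caller installing a value
   expression also sets DECL_HAS_VALUE_EXPR_P so later users know the
   table has an entry.  DECL and EXPR are hypothetical.  */
static void
example_install_value_expr (tree decl, tree expr)
{
  decl_value_expr_insert (decl, expr);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;
}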
6591
6592 /* Lookup a vector of debug arguments for FROM, and return it if we
6593 find one. */
6594
6595 vec<tree, va_gc> **
6596 decl_debug_args_lookup (tree from)
6597 {
6598 struct tree_vec_map *h, in;
6599
6600 if (!DECL_HAS_DEBUG_ARGS_P (from))
6601 return NULL;
6602 gcc_checking_assert (debug_args_for_decl != NULL);
6603 in.base.from = from;
6604 h = (struct tree_vec_map *)
6605 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6606 if (h)
6607 return &h->to;
6608 return NULL;
6609 }
6610
6611 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6612 arguments hashtable. */
6613
6614 vec<tree, va_gc> **
6615 decl_debug_args_insert (tree from)
6616 {
6617 struct tree_vec_map *h;
6618 void **loc;
6619
6620 if (DECL_HAS_DEBUG_ARGS_P (from))
6621 return decl_debug_args_lookup (from);
6622 if (debug_args_for_decl == NULL)
6623 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6624 tree_vec_map_eq, 0);
6625 h = ggc_alloc_tree_vec_map ();
6626 h->base.from = from;
6627 h->to = NULL;
6628 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6629 INSERT);
6630 *(struct tree_vec_map **) loc = h;
6631 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6632 return &h->to;
6633 }
6634
6635 /* Hashing of types so that we don't make duplicates.
6636 The entry point is `type_hash_canon'. */
6637
6638 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6639 with types in the TREE_VALUE slots), by adding the hash codes
6640 of the individual types. */
6641
6642 static unsigned int
6643 type_hash_list (const_tree list, hashval_t hashcode)
6644 {
6645 const_tree tail;
6646
6647 for (tail = list; tail; tail = TREE_CHAIN (tail))
6648 if (TREE_VALUE (tail) != error_mark_node)
6649 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6650 hashcode);
6651
6652 return hashcode;
6653 }
6654
6655 /* These are the Hashtable callback functions. */
6656
6657 /* Returns true iff the types are equivalent. */
6658
6659 static int
6660 type_hash_eq (const void *va, const void *vb)
6661 {
6662 const struct type_hash *const a = (const struct type_hash *) va,
6663 *const b = (const struct type_hash *) vb;
6664
6665 /* First test the things that are the same for all types. */
6666 if (a->hash != b->hash
6667 || TREE_CODE (a->type) != TREE_CODE (b->type)
6668 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6669 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6670 TYPE_ATTRIBUTES (b->type))
6671 || (TREE_CODE (a->type) != COMPLEX_TYPE
6672 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6673 return 0;
6674
6675 /* Be careful about comparing arrays before and after the element type
6676 has been completed; don't compare TYPE_ALIGN unless both types are
6677 complete. */
6678 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6679 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6680 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6681 return 0;
6682
6683 switch (TREE_CODE (a->type))
6684 {
6685 case VOID_TYPE:
6686 case COMPLEX_TYPE:
6687 case POINTER_TYPE:
6688 case REFERENCE_TYPE:
6689 case NULLPTR_TYPE:
6690 return 1;
6691
6692 case VECTOR_TYPE:
6693 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6694
6695 case ENUMERAL_TYPE:
6696 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6697 && !(TYPE_VALUES (a->type)
6698 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6699 && TYPE_VALUES (b->type)
6700 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6701 && type_list_equal (TYPE_VALUES (a->type),
6702 TYPE_VALUES (b->type))))
6703 return 0;
6704
6705 /* ... fall through ... */
6706
6707 case INTEGER_TYPE:
6708 case REAL_TYPE:
6709 case BOOLEAN_TYPE:
6710 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6711 return false;
6712 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6713 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6714 TYPE_MAX_VALUE (b->type)))
6715 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6716 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6717 TYPE_MIN_VALUE (b->type))));
6718
6719 case FIXED_POINT_TYPE:
6720 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6721
6722 case OFFSET_TYPE:
6723 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6724
6725 case METHOD_TYPE:
6726 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6727 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6728 || (TYPE_ARG_TYPES (a->type)
6729 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6730 && TYPE_ARG_TYPES (b->type)
6731 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6732 && type_list_equal (TYPE_ARG_TYPES (a->type),
6733 TYPE_ARG_TYPES (b->type)))))
6734 break;
6735 return 0;
6736 case ARRAY_TYPE:
6737 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6738
6739 case RECORD_TYPE:
6740 case UNION_TYPE:
6741 case QUAL_UNION_TYPE:
6742 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6743 || (TYPE_FIELDS (a->type)
6744 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6745 && TYPE_FIELDS (b->type)
6746 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6747 && type_list_equal (TYPE_FIELDS (a->type),
6748 TYPE_FIELDS (b->type))));
6749
6750 case FUNCTION_TYPE:
6751 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6752 || (TYPE_ARG_TYPES (a->type)
6753 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6754 && TYPE_ARG_TYPES (b->type)
6755 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6756 && type_list_equal (TYPE_ARG_TYPES (a->type),
6757 TYPE_ARG_TYPES (b->type))))
6758 break;
6759 return 0;
6760
6761 default:
6762 return 0;
6763 }
6764
6765 if (lang_hooks.types.type_hash_eq != NULL)
6766 return lang_hooks.types.type_hash_eq (a->type, b->type);
6767
6768 return 1;
6769 }
6770
6771 /* Return the cached hash value. */
6772
6773 static hashval_t
6774 type_hash_hash (const void *item)
6775 {
6776 return ((const struct type_hash *) item)->hash;
6777 }
6778
6779 /* Look in the type hash table for a type isomorphic to TYPE.
6780 If one is found, return it. Otherwise return 0. */
6781
6782 static tree
6783 type_hash_lookup (hashval_t hashcode, tree type)
6784 {
6785 struct type_hash *h, in;
6786
6787 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6788 must call that routine before comparing TYPE_ALIGNs. */
6789 layout_type (type);
6790
6791 in.hash = hashcode;
6792 in.type = type;
6793
6794 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6795 hashcode);
6796 if (h)
6797 return h->type;
6798 return NULL_TREE;
6799 }
6800
6801 /* Add an entry to the type-hash-table
6802 for a type TYPE whose hash code is HASHCODE. */
6803
6804 static void
6805 type_hash_add (hashval_t hashcode, tree type)
6806 {
6807 struct type_hash *h;
6808 void **loc;
6809
6810 h = ggc_alloc_type_hash ();
6811 h->hash = hashcode;
6812 h->type = type;
6813 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6814 *loc = (void *)h;
6815 }
6816
6817 /* Given TYPE, and HASHCODE its hash code, return the canonical
6818 object for an identical type if one already exists.
6819 Otherwise, return TYPE, and record it as the canonical object.
6820
6821 To use this function, first create a type of the sort you want.
6822 Then compute its hash code from the fields of the type that
6823 make it different from other similar types.
6824 Then call this function and use the value. */
6825
6826 tree
6827 type_hash_canon (unsigned int hashcode, tree type)
6828 {
6829 tree t1;
6830
6831 /* The hash table only contains main variants, so ensure that's what we're
6832 being passed. */
6833 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6834
6835 /* See if the type is in the hash table already. If so, return it.
6836 Otherwise, add the type. */
6837 t1 = type_hash_lookup (hashcode, type);
6838 if (t1 != 0)
6839 {
6840 if (GATHER_STATISTICS)
6841 {
6842 tree_code_counts[(int) TREE_CODE (type)]--;
6843 tree_node_counts[(int) t_kind]--;
6844 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6845 }
6846 return t1;
6847 }
6848 else
6849 {
6850 type_hash_add (hashcode, type);
6851 return type;
6852 }
6853 }
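/* Illustrative sketch, not part of the original file, following the
   recipe in the comment above: build a fresh type, hash the fields that
   distinguish it, then canonicalize.  If an identical type was entered
   under the same hashcode earlier, the new node is dropped and the
   cached one is returned.  */
static tree
example_canon_unsigned_type (unsigned int precision)
{
  tree t = make_node (INTEGER_TYPE);
  TYPE_PRECISION (t) = precision;
  fixup_unsigned_type (t);
  return type_hash_canon (precision, t);
}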
6854
6855 /* See if the data pointed to by the type hash table is marked. We consider
6856 it marked if the type is marked or if a debug type number or symbol
6857 table entry has been made for the type. */
6858
6859 static int
6860 type_hash_marked_p (const void *p)
6861 {
6862 const_tree const type = ((const struct type_hash *) p)->type;
6863
6864 return ggc_marked_p (type);
6865 }
6866
6867 static void
6868 print_type_hash_statistics (void)
6869 {
6870 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6871 (long) htab_size (type_hash_table),
6872 (long) htab_elements (type_hash_table),
6873 htab_collisions (type_hash_table));
6874 }
6875
6876 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6877 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6878 by adding the hash codes of the individual attributes. */
6879
6880 static unsigned int
6881 attribute_hash_list (const_tree list, hashval_t hashcode)
6882 {
6883 const_tree tail;
6884
6885 for (tail = list; tail; tail = TREE_CHAIN (tail))
6886 /* ??? Do we want to add in TREE_VALUE too? */
6887 hashcode = iterative_hash_object
6888 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6889 return hashcode;
6890 }
6891
6892 /* Given two lists of attributes, return true if list l2 is
6893 equivalent to l1. */
6894
6895 int
6896 attribute_list_equal (const_tree l1, const_tree l2)
6897 {
6898 if (l1 == l2)
6899 return 1;
6900
6901 return attribute_list_contained (l1, l2)
6902 && attribute_list_contained (l2, l1);
6903 }
6904
6905 /* Given two lists of attributes, return true if list L2 is
6906 completely contained within L1. */
6907 /* ??? This would be faster if attribute names were stored in a canonicalized
6908 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6909 must be used to show these elements are equivalent (which they are). */
6910 /* ??? It's not clear that attributes with arguments will always be handled
6911 correctly. */
6912
6913 int
6914 attribute_list_contained (const_tree l1, const_tree l2)
6915 {
6916 const_tree t1, t2;
6917
6918 /* First check the obvious, maybe the lists are identical. */
6919 if (l1 == l2)
6920 return 1;
6921
6922 /* Maybe the lists are similar. */
6923 for (t1 = l1, t2 = l2;
6924 t1 != 0 && t2 != 0
6925 && get_attribute_name (t1) == get_attribute_name (t2)
6926 && TREE_VALUE (t1) == TREE_VALUE (t2);
6927 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6928 ;
6929
6930 /* Maybe the lists are equal. */
6931 if (t1 == 0 && t2 == 0)
6932 return 1;
6933
6934 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6935 {
6936 const_tree attr;
6937 /* This CONST_CAST is okay because lookup_attribute does not
6938 modify its argument and the return value is assigned to a
6939 const_tree. */
6940 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6941 CONST_CAST_TREE (l1));
6942 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6943 attr = lookup_ident_attribute (get_attribute_name (t2),
6944 TREE_CHAIN (attr)))
6945 ;
6946
6947 if (attr == NULL_TREE)
6948 return 0;
6949 }
6950
6951 return 1;
6952 }
6953
6954 /* Given two lists of types
6955 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6956 return 1 if the lists contain the same types in the same order.
6957 Also, the TREE_PURPOSEs must match. */
6958
6959 int
6960 type_list_equal (const_tree l1, const_tree l2)
6961 {
6962 const_tree t1, t2;
6963
6964 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6965 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6966 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6967 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6968 && (TREE_TYPE (TREE_PURPOSE (t1))
6969 == TREE_TYPE (TREE_PURPOSE (t2))))))
6970 return 0;
6971
6972 return t1 == t2;
6973 }
6974
6975 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6976 given by TYPE. If the argument list accepts variable arguments,
6977 then this function counts only the ordinary arguments. */
6978
6979 int
6980 type_num_arguments (const_tree type)
6981 {
6982 int i = 0;
6983 tree t;
6984
6985 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6986 /* If the function does not take a variable number of arguments,
6987 the last element in the list will have type `void'. */
6988 if (VOID_TYPE_P (TREE_VALUE (t)))
6989 break;
6990 else
6991 ++i;
6992
6993 return i;
6994 }
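/* Illustrative sketch, not part of the original file: for a prototype
   such as int f (int, int), the count is 2; the terminating
   void_type_node in TYPE_ARG_TYPES is not included.  */
static int
example_count_fixed_args (void)
{
  tree fntype = build_function_type_list (integer_type_node,
					  integer_type_node,
					  integer_type_node, NULL_TREE);
  return type_num_arguments (fntype);	/* Returns 2.  */
}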
6995
6996 /* Nonzero if integer constants T1 and T2
6997 represent the same constant value. */
6998
6999 int
7000 tree_int_cst_equal (const_tree t1, const_tree t2)
7001 {
7002 if (t1 == t2)
7003 return 1;
7004
7005 if (t1 == 0 || t2 == 0)
7006 return 0;
7007
7008 if (TREE_CODE (t1) == INTEGER_CST
7009 && TREE_CODE (t2) == INTEGER_CST
7010 && wi::to_widest (t1) == wi::to_widest (t2))
7011 return 1;
7012
7013 return 0;
7014 }
7015
7016 /* Nonzero if integer constants T1 and T2 represent values that satisfy <.
7017 The precise way of comparison depends on their data type. */
7018
7019 int
7020 tree_int_cst_lt (const_tree t1, const_tree t2)
7021 {
7022 return INT_CST_LT (t1, t2);
7023 }
7024
7025 /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
7026
7027 int
7028 tree_int_cst_compare (const_tree t1, const_tree t2)
7029 {
7030 return wi::cmps (wi::to_widest (t1), wi::to_widest (t2));
7031 }
7032
7033 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
7034 kind INTEGER_CST. This makes sure to properly sign-extend the
7035 constant. */
7036
7037 HOST_WIDE_INT
7038 size_low_cst (const_tree t)
7039 {
7040 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
7041 int prec = TYPE_PRECISION (TREE_TYPE (t));
7042 if (prec < HOST_BITS_PER_WIDE_INT)
7043 return sext_hwi (w, prec);
7044 return w;
7045 }
7046
7047 /* Return the most significant (sign) bit of T. */
7048
7049 int
7050 tree_int_cst_sign_bit (const_tree t)
7051 {
7052 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7053
7054 return wi::extract_uhwi (t, bitno, 1);
7055 }
7056
7057 /* Return an indication of the sign of the integer constant T.
7058 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7059 Note that -1 will never be returned if T's type is unsigned. */
7060
7061 int
7062 tree_int_cst_sgn (const_tree t)
7063 {
7064 if (wi::eq_p (t, 0))
7065 return 0;
7066 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7067 return 1;
7068 else if (wi::neg_p (t))
7069 return -1;
7070 else
7071 return 1;
7072 }
7073
7074 /* Return the minimum number of bits needed to represent VALUE in a
7075 signed or unsigned type; SGN says which. */
7076
7077 unsigned int
7078 tree_int_cst_min_precision (tree value, signop sgn)
7079 {
7080 /* If the value is negative, compute its negative minus 1. The latter
7081 adjustment is because the absolute value of the largest negative value
7082 is one larger than the largest positive value. This is equivalent to
7083 a bit-wise negation, so use that operation instead. */
7084
7085 if (tree_int_cst_sgn (value) < 0)
7086 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7087
7088 /* Return the number of bits needed, taking into account the fact
7089 that we need one more bit for a signed than unsigned type.
7090 If value is 0 or -1, the minimum precision is 1 no matter
7091 whether SGN is SIGNED or UNSIGNED. */
7092
7093 if (integer_zerop (value))
7094 return 1;
7095 else
7096 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7097 }
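/* Illustrative sketch, not part of the original file: for the value 5
   the minimum precision is 3 bits unsigned (101) and 4 bits signed
   (0101, one extra bit for the sign).  */
static void
example_min_precision (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  gcc_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
}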
7098
7099 /* Compare two constructor-element-type constants. Return 1 if the lists
7100 are known to be equal; otherwise return 0. */
7101
7102 int
7103 simple_cst_list_equal (const_tree l1, const_tree l2)
7104 {
7105 while (l1 != NULL_TREE && l2 != NULL_TREE)
7106 {
7107 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
7108 return 0;
7109
7110 l1 = TREE_CHAIN (l1);
7111 l2 = TREE_CHAIN (l2);
7112 }
7113
7114 return l1 == l2;
7115 }
7116
7117 /* Return truthvalue of whether T1 is the same tree structure as T2.
7118 Return 1 if they are the same.
7119 Return 0 if they are understandably different.
7120 Return -1 if either contains tree structure not understood by
7121 this function. */
7122
7123 int
7124 simple_cst_equal (const_tree t1, const_tree t2)
7125 {
7126 enum tree_code code1, code2;
7127 int cmp;
7128 int i;
7129
7130 if (t1 == t2)
7131 return 1;
7132 if (t1 == 0 || t2 == 0)
7133 return 0;
7134
7135 code1 = TREE_CODE (t1);
7136 code2 = TREE_CODE (t2);
7137
7138 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7139 {
7140 if (CONVERT_EXPR_CODE_P (code2)
7141 || code2 == NON_LVALUE_EXPR)
7142 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7143 else
7144 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7145 }
7146
7147 else if (CONVERT_EXPR_CODE_P (code2)
7148 || code2 == NON_LVALUE_EXPR)
7149 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7150
7151 if (code1 != code2)
7152 return 0;
7153
7154 switch (code1)
7155 {
7156 case INTEGER_CST:
7157 return wi::to_widest (t1) == wi::to_widest (t2);
7158
7159 case REAL_CST:
7160 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7161
7162 case FIXED_CST:
7163 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7164
7165 case STRING_CST:
7166 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7167 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7168 TREE_STRING_LENGTH (t1)));
7169
7170 case CONSTRUCTOR:
7171 {
7172 unsigned HOST_WIDE_INT idx;
7173 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7174 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7175
7176 if (vec_safe_length (v1) != vec_safe_length (v2))
7177 return false;
7178
7179 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7180 /* ??? Should we handle also fields here? */
7181 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7182 return false;
7183 return true;
7184 }
7185
7186 case SAVE_EXPR:
7187 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7188
7189 case CALL_EXPR:
7190 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7191 if (cmp <= 0)
7192 return cmp;
7193 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7194 return 0;
7195 {
7196 const_tree arg1, arg2;
7197 const_call_expr_arg_iterator iter1, iter2;
7198 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7199 arg2 = first_const_call_expr_arg (t2, &iter2);
7200 arg1 && arg2;
7201 arg1 = next_const_call_expr_arg (&iter1),
7202 arg2 = next_const_call_expr_arg (&iter2))
7203 {
7204 cmp = simple_cst_equal (arg1, arg2);
7205 if (cmp <= 0)
7206 return cmp;
7207 }
7208 return arg1 == arg2;
7209 }
7210
7211 case TARGET_EXPR:
7212 /* Special case: if either target is an unallocated VAR_DECL,
7213 it means that it's going to be unified with whatever the
7214 TARGET_EXPR is really supposed to initialize, so treat it
7215 as being equivalent to anything. */
7216 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7217 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7218 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7219 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7220 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7221 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7222 cmp = 1;
7223 else
7224 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7225
7226 if (cmp <= 0)
7227 return cmp;
7228
7229 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7230
7231 case WITH_CLEANUP_EXPR:
7232 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7233 if (cmp <= 0)
7234 return cmp;
7235
7236 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7237
7238 case COMPONENT_REF:
7239 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7240 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7241
7242 return 0;
7243
7244 case VAR_DECL:
7245 case PARM_DECL:
7246 case CONST_DECL:
7247 case FUNCTION_DECL:
7248 return 0;
7249
7250 default:
7251 break;
7252 }
7253
7254 /* This general rule works for most tree codes. All exceptions should be
7255 handled above. If this is a language-specific tree code, we can't
7256 trust what might be in the operand, so say we don't know
7257 the situation. */
7258 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7259 return -1;
7260
7261 switch (TREE_CODE_CLASS (code1))
7262 {
7263 case tcc_unary:
7264 case tcc_binary:
7265 case tcc_comparison:
7266 case tcc_expression:
7267 case tcc_reference:
7268 case tcc_statement:
7269 cmp = 1;
7270 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7271 {
7272 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7273 if (cmp <= 0)
7274 return cmp;
7275 }
7276
7277 return cmp;
7278
7279 default:
7280 return -1;
7281 }
7282 }
7283
7284 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7285 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7286 than U, respectively. */
7287
7288 int
7289 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7290 {
7291 if (tree_int_cst_sgn (t) < 0)
7292 return -1;
7293 else if (!cst_fits_uhwi_p (t))
7294 return 1;
7295 else if ((unsigned HOST_WIDE_INT) tree_to_hwi (t) == u)
7296 return 0;
7297 else if ((unsigned HOST_WIDE_INT) tree_to_hwi (t) < u)
7298 return -1;
7299 else
7300 return 1;
7301 }
7302
7303 /* Return true if SIZE represents a constant size that is in bounds of
7304 what the middle-end and the backend accept (covering not more than
7305 half of the address-space). */
7306
7307 bool
7308 valid_constant_size_p (const_tree size)
7309 {
7310 if (! tree_fits_uhwi_p (size)
7311 || TREE_OVERFLOW (size)
7312 || tree_int_cst_sign_bit (size) != 0)
7313 return false;
7314 return true;
7315 }
7316
7317 /* Return the precision of the type, or for a complex or vector type the
7318 precision of the type of its elements. */
7319
7320 unsigned int
7321 element_precision (const_tree type)
7322 {
7323 enum tree_code code = TREE_CODE (type);
7324 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7325 type = TREE_TYPE (type);
7326
7327 return TYPE_PRECISION (type);
7328 }
7329
7330 /* Return true if CODE represents an associative tree code. Otherwise
7331 return false. */
7332 bool
7333 associative_tree_code (enum tree_code code)
7334 {
7335 switch (code)
7336 {
7337 case BIT_IOR_EXPR:
7338 case BIT_AND_EXPR:
7339 case BIT_XOR_EXPR:
7340 case PLUS_EXPR:
7341 case MULT_EXPR:
7342 case MIN_EXPR:
7343 case MAX_EXPR:
7344 return true;
7345
7346 default:
7347 break;
7348 }
7349 return false;
7350 }
7351
7352 /* Return true if CODE represents a commutative tree code. Otherwise
7353 return false. */
7354 bool
7355 commutative_tree_code (enum tree_code code)
7356 {
7357 switch (code)
7358 {
7359 case PLUS_EXPR:
7360 case MULT_EXPR:
7361 case MULT_HIGHPART_EXPR:
7362 case MIN_EXPR:
7363 case MAX_EXPR:
7364 case BIT_IOR_EXPR:
7365 case BIT_XOR_EXPR:
7366 case BIT_AND_EXPR:
7367 case NE_EXPR:
7368 case EQ_EXPR:
7369 case UNORDERED_EXPR:
7370 case ORDERED_EXPR:
7371 case UNEQ_EXPR:
7372 case LTGT_EXPR:
7373 case TRUTH_AND_EXPR:
7374 case TRUTH_XOR_EXPR:
7375 case TRUTH_OR_EXPR:
7376 case WIDEN_MULT_EXPR:
7377 case VEC_WIDEN_MULT_HI_EXPR:
7378 case VEC_WIDEN_MULT_LO_EXPR:
7379 case VEC_WIDEN_MULT_EVEN_EXPR:
7380 case VEC_WIDEN_MULT_ODD_EXPR:
7381 return true;
7382
7383 default:
7384 break;
7385 }
7386 return false;
7387 }
7388
7389 /* Return true if CODE represents a ternary tree code for which the
7390 first two operands are commutative. Otherwise return false. */
7391 bool
7392 commutative_ternary_tree_code (enum tree_code code)
7393 {
7394 switch (code)
7395 {
7396 case WIDEN_MULT_PLUS_EXPR:
7397 case WIDEN_MULT_MINUS_EXPR:
7398 return true;
7399
7400 default:
7401 break;
7402 }
7403 return false;
7404 }
7405
7406 /* Generate a hash value for an expression. This can be used iteratively
7407 by passing a previous result as the VAL argument.
7408
7409 This function is intended to produce the same hash for expressions which
7410 would compare equal using operand_equal_p. */
7411
7412 hashval_t
7413 iterative_hash_expr (const_tree t, hashval_t val)
7414 {
7415 int i;
7416 enum tree_code code;
7417 char tclass;
7418
7419 if (t == NULL_TREE)
7420 return iterative_hash_hashval_t (0, val);
7421
7422 code = TREE_CODE (t);
7423
7424 switch (code)
7425 {
7426 /* Alas, constants aren't shared, so we can't rely on pointer
7427 identity. */
7428 case INTEGER_CST:
7429 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7430 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7431 return val;
7432 case REAL_CST:
7433 {
7434 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7435
7436 return iterative_hash_hashval_t (val2, val);
7437 }
7438 case FIXED_CST:
7439 {
7440 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7441
7442 return iterative_hash_hashval_t (val2, val);
7443 }
7444 case STRING_CST:
7445 return iterative_hash (TREE_STRING_POINTER (t),
7446 TREE_STRING_LENGTH (t), val);
7447 case COMPLEX_CST:
7448 val = iterative_hash_expr (TREE_REALPART (t), val);
7449 return iterative_hash_expr (TREE_IMAGPART (t), val);
7450 case VECTOR_CST:
7451 {
7452 unsigned i;
7453 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7454 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7455 return val;
7456 }
7457 case SSA_NAME:
7458 /* We can just compare by pointer. */
7459 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7460 case PLACEHOLDER_EXPR:
7461 /* The node itself doesn't matter. */
7462 return val;
7463 case TREE_LIST:
7464 /* A list of expressions, for a CALL_EXPR or as the elements of a
7465 VECTOR_CST. */
7466 for (; t; t = TREE_CHAIN (t))
7467 val = iterative_hash_expr (TREE_VALUE (t), val);
7468 return val;
7469 case CONSTRUCTOR:
7470 {
7471 unsigned HOST_WIDE_INT idx;
7472 tree field, value;
7473 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7474 {
7475 val = iterative_hash_expr (field, val);
7476 val = iterative_hash_expr (value, val);
7477 }
7478 return val;
7479 }
7480 case FUNCTION_DECL:
7481 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7482 Otherwise nodes that compare equal according to operand_equal_p might
7483 get different hash codes. However, don't do this for machine specific
7484 or front end builtins, since the function code is overloaded in those
7485 cases. */
7486 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7487 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7488 {
7489 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7490 code = TREE_CODE (t);
7491 }
7492 /* FALL THROUGH */
7493 default:
7494 tclass = TREE_CODE_CLASS (code);
7495
7496 if (tclass == tcc_declaration)
7497 {
7498 /* DECL's have a unique ID */
7499 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7500 }
7501 else
7502 {
7503 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7504
7505 val = iterative_hash_object (code, val);
7506
7507 /* Don't hash the type, that can lead to having nodes which
7508 compare equal according to operand_equal_p, but which
7509 have different hash codes. */
7510 if (CONVERT_EXPR_CODE_P (code)
7511 || code == NON_LVALUE_EXPR)
7512 {
7513 /* Make sure to include signedness in the hash computation. */
7514 val += TYPE_UNSIGNED (TREE_TYPE (t));
7515 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7516 }
7517
7518 else if (commutative_tree_code (code))
7519 {
7520 /* It's a commutative expression. We want to hash it the same
7521 however it appears. We do this by first hashing both operands
7522 and then rehashing based on the order of their independent
7523 hashes. */
7524 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7525 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7526 hashval_t t;
7527
7528 if (one > two)
7529 t = one, one = two, two = t;
7530
7531 val = iterative_hash_hashval_t (one, val);
7532 val = iterative_hash_hashval_t (two, val);
7533 }
7534 else
7535 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7536 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7537 }
7538 return val;
7539 }
7540 }
7541
7542 /* Generate a hash value for a pair of expressions. This can be used
7543 iteratively by passing a previous result as the VAL argument.
7544
7545 The same hash value is always returned for a given pair of expressions,
7546 regardless of the order in which they are presented. This is useful in
7547 hashing the operands of commutative functions. */
7548
7549 hashval_t
7550 iterative_hash_exprs_commutative (const_tree t1,
7551 const_tree t2, hashval_t val)
7552 {
7553 hashval_t one = iterative_hash_expr (t1, 0);
7554 hashval_t two = iterative_hash_expr (t2, 0);
7555 hashval_t t;
7556
7557 if (one > two)
7558 t = one, one = two, two = t;
7559 val = iterative_hash_hashval_t (one, val);
7560 val = iterative_hash_hashval_t (two, val);
7561
7562 return val;
7563 }
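/* Illustrative sketch, not part of the original file: the pair hash is
   symmetric, so swapping the operands of a commutative expression does
   not change the result.  OP0 and OP1 are hypothetical operands.  */
static void
example_commutative_hash (tree op0, tree op1)
{
  gcc_assert (iterative_hash_exprs_commutative (op0, op1, 0)
	      == iterative_hash_exprs_commutative (op1, op0, 0));
}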
7564 \f
7565 /* Constructors for pointer, array and function types.
7566 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7567 constructed by language-dependent code, not here.) */
7568
7569 /* Construct, lay out and return the type of pointers to TO_TYPE with
7570 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7571 reference all of memory. If such a type has already been
7572 constructed, reuse it. */
7573
7574 tree
7575 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7576 bool can_alias_all)
7577 {
7578 tree t;
7579
7580 if (to_type == error_mark_node)
7581 return error_mark_node;
7582
7583 /* If the pointed-to type has the may_alias attribute set, force
7584 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7585 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7586 can_alias_all = true;
7587
7588 /* In some cases, languages will have things that aren't a POINTER_TYPE
7589 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7590 In that case, return that type without regard to the rest of our
7591 operands.
7592
7593 ??? This is a kludge, but consistent with the way this function has
7594 always operated and there doesn't seem to be a good way to avoid this
7595 at the moment. */
7596 if (TYPE_POINTER_TO (to_type) != 0
7597 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7598 return TYPE_POINTER_TO (to_type);
7599
7600 /* First, if we already have a type for pointers to TO_TYPE and it's
7601 the proper mode, use it. */
7602 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7603 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7604 return t;
7605
7606 t = make_node (POINTER_TYPE);
7607
7608 TREE_TYPE (t) = to_type;
7609 SET_TYPE_MODE (t, mode);
7610 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7611 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7612 TYPE_POINTER_TO (to_type) = t;
7613
7614 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7615 SET_TYPE_STRUCTURAL_EQUALITY (t);
7616 else if (TYPE_CANONICAL (to_type) != to_type)
7617 TYPE_CANONICAL (t)
7618 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7619 mode, can_alias_all);
7620
7621 /* Lay out the type. This function has many callers that are concerned
7622 with expression-construction, and this simplifies them all. */
7623 layout_type (t);
7624
7625 return t;
7626 }
7627
7628 /* By default build pointers in ptr_mode. */
7629
7630 tree
7631 build_pointer_type (tree to_type)
7632 {
7633 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7634 : TYPE_ADDR_SPACE (to_type);
7635 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7636 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7637 }
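/* Illustrative sketch, not part of the original file: pointer types are
   cached on TYPE_POINTER_TO, so repeated requests for the same pointee
   in the same mode return the same node.  */
static void
example_pointer_cache (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  gcc_assert (p1 == p2);
}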
7638
7639 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7640
7641 tree
7642 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7643 bool can_alias_all)
7644 {
7645 tree t;
7646
7647 if (to_type == error_mark_node)
7648 return error_mark_node;
7649
7650 /* If the pointed-to type has the may_alias attribute set, force
7651 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7652 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7653 can_alias_all = true;
7654
7655 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7656 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7657 In that case, return that type without regard to the rest of our
7658 operands.
7659
7660 ??? This is a kludge, but consistent with the way this function has
7661 always operated and there doesn't seem to be a good way to avoid this
7662 at the moment. */
7663 if (TYPE_REFERENCE_TO (to_type) != 0
7664 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7665 return TYPE_REFERENCE_TO (to_type);
7666
7667 /* First, if we already have a type for pointers to TO_TYPE and it's
7668 the proper mode, use it. */
7669 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7670 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7671 return t;
7672
7673 t = make_node (REFERENCE_TYPE);
7674
7675 TREE_TYPE (t) = to_type;
7676 SET_TYPE_MODE (t, mode);
7677 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7678 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7679 TYPE_REFERENCE_TO (to_type) = t;
7680
7681 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7682 SET_TYPE_STRUCTURAL_EQUALITY (t);
7683 else if (TYPE_CANONICAL (to_type) != to_type)
7684 TYPE_CANONICAL (t)
7685 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7686 mode, can_alias_all);
7687
7688 layout_type (t);
7689
7690 return t;
7691 }
7692
7693
7694 /* Build the node for the type of references-to-TO_TYPE by default
7695 in ptr_mode. */
7696
7697 tree
7698 build_reference_type (tree to_type)
7699 {
7700 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7701 : TYPE_ADDR_SPACE (to_type);
7702 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7703 return build_reference_type_for_mode (to_type, pointer_mode, false);
7704 }
7705
7706 /* Build a type that is compatible with t but has no cv quals anywhere
7707 in its type, thus
7708
7709 const char *const *const * -> char ***. */
7710
7711 tree
7712 build_type_no_quals (tree t)
7713 {
7714 switch (TREE_CODE (t))
7715 {
7716 case POINTER_TYPE:
7717 return build_pointer_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
7718 TYPE_MODE (t),
7719 TYPE_REF_CAN_ALIAS_ALL (t));
7720 case REFERENCE_TYPE:
7721 return
7722 build_reference_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
7723 TYPE_MODE (t),
7724 TYPE_REF_CAN_ALIAS_ALL (t));
7725 default:
7726 return TYPE_MAIN_VARIANT (t);
7727 }
7728 }
7729
7730 #define MAX_INT_CACHED_PREC \
7731 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7732 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7733
7734 /* Builds a signed or unsigned integer type of precision PRECISION.
7735 Used for C bitfields whose precision does not match that of
7736 built-in target types. */
7737 tree
7738 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7739 int unsignedp)
7740 {
7741 tree itype, ret;
7742
7743 if (unsignedp)
7744 unsignedp = MAX_INT_CACHED_PREC + 1;
7745
7746 if (precision <= MAX_INT_CACHED_PREC)
7747 {
7748 itype = nonstandard_integer_type_cache[precision + unsignedp];
7749 if (itype)
7750 return itype;
7751 }
7752
7753 itype = make_node (INTEGER_TYPE);
7754 TYPE_PRECISION (itype) = precision;
7755
7756 if (unsignedp)
7757 fixup_unsigned_type (itype);
7758 else
7759 fixup_signed_type (itype);
7760
7761 ret = itype;
7762 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7763 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7764 if (precision <= MAX_INT_CACHED_PREC)
7765 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7766
7767 return ret;
7768 }
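
/* Illustrative sketch (hypothetical variable name): a C front end laying
   out a bit-field declared as "unsigned x : 24" can obtain a matching
   24-bit unsigned INTEGER_TYPE with

     tree u24 = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are served from the cache above,
   so repeated requests return the same node.  */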
7769
7770 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7771 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7772 is true, reuse such a type that has already been constructed. */
7773
7774 static tree
7775 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7776 {
7777 tree itype = make_node (INTEGER_TYPE);
7778 hashval_t hashcode = 0;
7779
7780 TREE_TYPE (itype) = type;
7781
7782 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7783 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7784
7785 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7786 SET_TYPE_MODE (itype, TYPE_MODE (type));
7787 TYPE_SIZE (itype) = TYPE_SIZE (type);
7788 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7789 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7790 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7791
7792 if (!shared)
7793 return itype;
7794
7795 if ((TYPE_MIN_VALUE (itype)
7796 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7797 || (TYPE_MAX_VALUE (itype)
7798 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7799 {
7800 /* Since we cannot reliably merge this type, we need to compare it using
7801 structural equality checks. */
7802 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7803 return itype;
7804 }
7805
7806 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7807 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7808 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7809 itype = type_hash_canon (hashcode, itype);
7810
7811 return itype;
7812 }
7813
7814 /* Wrapper around build_range_type_1 with SHARED set to true. */
7815
7816 tree
7817 build_range_type (tree type, tree lowval, tree highval)
7818 {
7819 return build_range_type_1 (type, lowval, highval, true);
7820 }
7821
7822 /* Wrapper around build_range_type_1 with SHARED set to false. */
7823
7824 tree
7825 build_nonshared_range_type (tree type, tree lowval, tree highval)
7826 {
7827 return build_range_type_1 (type, lowval, highval, false);
7828 }
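
/* Illustrative sketch (hypothetical variable name): an Ada- or Pascal-like
   subrange 1 .. 10 over the standard integer type can be built with

     tree r = build_range_type (integer_type_node,
                                build_int_cst (integer_type_node, 1),
                                build_int_cst (integer_type_node, 10));

   With constant bounds the node is entered in the type hash table and
   shared; build_nonshared_range_type skips that step and always returns a
   fresh node.  */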
7829
7830 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7831 MAXVAL should be the maximum value in the domain
7832 (one less than the length of the array).
7833
7834 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7835 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7836 The limit exists because the result is a signed type and we don't handle
7837 sizes that use more than one HOST_WIDE_INT. */
7838
7839 tree
7840 build_index_type (tree maxval)
7841 {
7842 return build_range_type (sizetype, size_zero_node, maxval);
7843 }
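
/* Illustrative sketch (hypothetical variable name): the TYPE_DOMAIN for a
   ten-element array is the sizetype range 0 .. 9, i.e.

     tree domain = build_index_type (size_int (9));

   where size_int wraps a small host integer in sizetype.  */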
7844
7845 /* Return true if the debug information for TYPE, a subtype, should be emitted
7846 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7847 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7848 debug info and doesn't reflect the source code. */
7849
7850 bool
7851 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7852 {
7853 tree base_type = TREE_TYPE (type), low, high;
7854
7855 /* Subrange types have a base type which is an integral type. */
7856 if (!INTEGRAL_TYPE_P (base_type))
7857 return false;
7858
7859 /* Get the real bounds of the subtype. */
7860 if (lang_hooks.types.get_subrange_bounds)
7861 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7862 else
7863 {
7864 low = TYPE_MIN_VALUE (type);
7865 high = TYPE_MAX_VALUE (type);
7866 }
7867
7868 /* If the type and its base type have the same representation and the same
7869 name, then the type is not a subrange but a copy of the base type. */
7870 if ((TREE_CODE (base_type) == INTEGER_TYPE
7871 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7872 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7873 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7874 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
7875 {
7876 tree type_name = TYPE_NAME (type);
7877 tree base_type_name = TYPE_NAME (base_type);
7878
7879 if (type_name && TREE_CODE (type_name) == TYPE_DECL)
7880 type_name = DECL_NAME (type_name);
7881
7882 if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
7883 base_type_name = DECL_NAME (base_type_name);
7884
7885 if (type_name == base_type_name)
7886 return false;
7887 }
7888
7889 if (lowval)
7890 *lowval = low;
7891 if (highval)
7892 *highval = high;
7893 return true;
7894 }
7895
7896 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7897 and number of elements specified by the range of values of INDEX_TYPE.
7898 If SHARED is true, reuse such a type that has already been constructed. */
7899
7900 static tree
7901 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7902 {
7903 tree t;
7904
7905 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7906 {
7907 error ("arrays of functions are not meaningful");
7908 elt_type = integer_type_node;
7909 }
7910
7911 t = make_node (ARRAY_TYPE);
7912 TREE_TYPE (t) = elt_type;
7913 TYPE_DOMAIN (t) = index_type;
7914 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7915 layout_type (t);
7916
7917 /* If the element type is incomplete at this point we get marked for
7918 structural equality. Do not record these types in the canonical
7919 type hashtable. */
7920 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7921 return t;
7922
7923 if (shared)
7924 {
7925 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7926 if (index_type)
7927 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7928 t = type_hash_canon (hashcode, t);
7929 }
7930
7931 if (TYPE_CANONICAL (t) == t)
7932 {
7933 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7934 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7935 SET_TYPE_STRUCTURAL_EQUALITY (t);
7936 else if (TYPE_CANONICAL (elt_type) != elt_type
7937 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7938 TYPE_CANONICAL (t)
7939 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7940 index_type
7941 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7942 shared);
7943 }
7944
7945 return t;
7946 }
7947
7948 /* Wrapper around build_array_type_1 with SHARED set to true. */
7949
7950 tree
7951 build_array_type (tree elt_type, tree index_type)
7952 {
7953 return build_array_type_1 (elt_type, index_type, true);
7954 }
7955
7956 /* Wrapper around build_array_type_1 with SHARED set to false. */
7957
7958 tree
7959 build_nonshared_array_type (tree elt_type, tree index_type)
7960 {
7961 return build_array_type_1 (elt_type, index_type, false);
7962 }
7963
7964 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7965 sizetype. */
7966
7967 tree
7968 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7969 {
7970 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7971 }
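
/* Illustrative sketch (hypothetical variable names): the two calls below
   build equivalent types for an array of ten ints (and, because
   ARRAY_TYPEs are hash-shared, normally the very same node);
   build_array_type_nelts is just shorthand for pairing build_index_type
   with build_array_type.

     tree a10 = build_array_type_nelts (integer_type_node, 10);
     tree a10_long = build_array_type (integer_type_node,
                                       build_index_type (size_int (9)));  */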
7972
7973 /* Recursively examines the array elements of TYPE, until a non-array
7974 element type is found. */
7975
7976 tree
7977 strip_array_types (tree type)
7978 {
7979 while (TREE_CODE (type) == ARRAY_TYPE)
7980 type = TREE_TYPE (type);
7981
7982 return type;
7983 }
7984
7985 /* Computes the canonical argument types from the argument type list
7986 ARGTYPES.
7987
7988 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7989 on entry to this function, or if any of the ARGTYPES are
7990 structural.
7991
7992 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7993 true on entry to this function, or if any of the ARGTYPES are
7994 non-canonical.
7995
7996 Returns a canonical argument list, which may be ARGTYPES when the
7997 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7998 true) or would not differ from ARGTYPES. */
7999
8000 static tree
8001 maybe_canonicalize_argtypes (tree argtypes,
8002 bool *any_structural_p,
8003 bool *any_noncanonical_p)
8004 {
8005 tree arg;
8006 bool any_noncanonical_argtypes_p = false;
8007
8008 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8009 {
8010 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8011 /* Fail gracefully by stating that the type is structural. */
8012 *any_structural_p = true;
8013 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8014 *any_structural_p = true;
8015 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8016 || TREE_PURPOSE (arg))
8017 /* If the argument has a default argument, we consider it
8018 non-canonical even though the type itself is canonical.
8019 That way, different variants of function and method types
8020 with default arguments will all point to the variant with
8021 no defaults as their canonical type. */
8022 any_noncanonical_argtypes_p = true;
8023 }
8024
8025 if (*any_structural_p)
8026 return argtypes;
8027
8028 if (any_noncanonical_argtypes_p)
8029 {
8030 /* Build the canonical list of argument types. */
8031 tree canon_argtypes = NULL_TREE;
8032 bool is_void = false;
8033
8034 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8035 {
8036 if (arg == void_list_node)
8037 is_void = true;
8038 else
8039 canon_argtypes = tree_cons (NULL_TREE,
8040 TYPE_CANONICAL (TREE_VALUE (arg)),
8041 canon_argtypes);
8042 }
8043
8044 canon_argtypes = nreverse (canon_argtypes);
8045 if (is_void)
8046 canon_argtypes = chainon (canon_argtypes, void_list_node);
8047
8048 /* There is a non-canonical type. */
8049 *any_noncanonical_p = true;
8050 return canon_argtypes;
8051 }
8052
8053 /* The canonical argument types are the same as ARGTYPES. */
8054 return argtypes;
8055 }
8056
8057 /* Construct, lay out and return
8058 the type of functions returning type VALUE_TYPE
8059 given arguments of types ARG_TYPES.
8060 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8061 are data type nodes for the arguments of the function.
8062 If such a type has already been constructed, reuse it. */
8063
8064 tree
8065 build_function_type (tree value_type, tree arg_types)
8066 {
8067 tree t;
8068 hashval_t hashcode = 0;
8069 bool any_structural_p, any_noncanonical_p;
8070 tree canon_argtypes;
8071
8072 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8073 {
8074 error ("function return type cannot be function");
8075 value_type = integer_type_node;
8076 }
8077
8078 /* Make a node of the sort we want. */
8079 t = make_node (FUNCTION_TYPE);
8080 TREE_TYPE (t) = value_type;
8081 TYPE_ARG_TYPES (t) = arg_types;
8082
8083 /* If we already have such a type, use the old one. */
8084 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
8085 hashcode = type_hash_list (arg_types, hashcode);
8086 t = type_hash_canon (hashcode, t);
8087
8088 /* Set up the canonical type. */
8089 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8090 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8091 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8092 &any_structural_p,
8093 &any_noncanonical_p);
8094 if (any_structural_p)
8095 SET_TYPE_STRUCTURAL_EQUALITY (t);
8096 else if (any_noncanonical_p)
8097 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8098 canon_argtypes);
8099
8100 if (!COMPLETE_TYPE_P (t))
8101 layout_type (t);
8102 return t;
8103 }
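
/* Illustrative sketch (hypothetical variable names): the TREE_LIST chain
   for the C prototype "int f (int, double)" is built back to front and
   terminated with void_list_node, then handed to build_function_type:

     tree args = tree_cons (NULL_TREE, integer_type_node,
                            tree_cons (NULL_TREE, double_type_node,
                                       void_list_node));
     tree fntype = build_function_type (integer_type_node, args);

   Most callers use the build_function_type_list wrappers below instead of
   spelling out the tree_cons chain by hand.  */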
8104
8105 /* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
8106 return value if SKIP_RETURN is true. */
8107
8108 static tree
8109 build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
8110 bool skip_return)
8111 {
8112 tree new_type = NULL;
8113 tree args, new_args = NULL, t;
8114 tree new_reversed;
8115 int i = 0;
8116
8117 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
8118 args = TREE_CHAIN (args), i++)
8119 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
8120 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
8121
8122 new_reversed = nreverse (new_args);
8123 if (args)
8124 {
8125 if (new_reversed)
8126 TREE_CHAIN (new_args) = void_list_node;
8127 else
8128 new_reversed = void_list_node;
8129 }
8130
8131 /* Use copy_node to preserve as much as possible from the original type
8132 (debug info, attribute lists etc.).
8133 The exception is that METHOD_TYPEs must have a THIS argument.
8134 When we are asked to remove it, we need to build a new FUNCTION_TYPE
8135 instead. */
8136 if (TREE_CODE (orig_type) != METHOD_TYPE
8137 || !args_to_skip
8138 || !bitmap_bit_p (args_to_skip, 0))
8139 {
8140 new_type = build_distinct_type_copy (orig_type);
8141 TYPE_ARG_TYPES (new_type) = new_reversed;
8142 }
8143 else
8144 {
8145 new_type
8146 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
8147 new_reversed));
8148 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
8149 }
8150
8151 if (skip_return)
8152 TREE_TYPE (new_type) = void_type_node;
8153
8154 /* This is a new type, not a copy of an old type. Need to reassociate
8155 variants. We can handle everything except the main variant lazily. */
8156 t = TYPE_MAIN_VARIANT (orig_type);
8157 if (t != orig_type)
8158 {
8159 t = build_function_type_skip_args (t, args_to_skip, skip_return);
8160 TYPE_MAIN_VARIANT (new_type) = t;
8161 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
8162 TYPE_NEXT_VARIANT (t) = new_type;
8163 }
8164 else
8165 {
8166 TYPE_MAIN_VARIANT (new_type) = new_type;
8167 TYPE_NEXT_VARIANT (new_type) = NULL;
8168 }
8169
8170 return new_type;
8171 }
8172
8173 /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
8174 return value if SKIP_RETURN is true.
8175
8176 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
8177 linked by TREE_CHAIN directly. The caller is responsible for eliminating
8178 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
8179
8180 tree
8181 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
8182 bool skip_return)
8183 {
8184 tree new_decl = copy_node (orig_decl);
8185 tree new_type;
8186
8187 new_type = TREE_TYPE (orig_decl);
8188 if (prototype_p (new_type)
8189 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
8190 new_type
8191 = build_function_type_skip_args (new_type, args_to_skip, skip_return);
8192 TREE_TYPE (new_decl) = new_type;
8193
8194 /* For declarations setting DECL_VINDEX (i.e. methods)
8195 we expect the first argument to be the THIS pointer. */
8196 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
8197 DECL_VINDEX (new_decl) = NULL_TREE;
8198
8199 /* When signature changes, we need to clear builtin info. */
8200 if (DECL_BUILT_IN (new_decl)
8201 && args_to_skip
8202 && !bitmap_empty_p (args_to_skip))
8203 {
8204 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
8205 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
8206 }
8207 return new_decl;
8208 }
8209
8210 /* Build a function type. The RETURN_TYPE is the type returned by the
8211 function. If VAARGS is set, no void_type_node is appended to the
8212 list. ARGP must always be terminated by a NULL_TREE. */
8213
8214 static tree
8215 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8216 {
8217 tree t, args, last;
8218
8219 t = va_arg (argp, tree);
8220 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8221 args = tree_cons (NULL_TREE, t, args);
8222
8223 if (vaargs)
8224 {
8225 last = args;
8226 if (args != NULL_TREE)
8227 args = nreverse (args);
8228 gcc_assert (last != void_list_node);
8229 }
8230 else if (args == NULL_TREE)
8231 args = void_list_node;
8232 else
8233 {
8234 last = args;
8235 args = nreverse (args);
8236 TREE_CHAIN (last) = void_list_node;
8237 }
8238 args = build_function_type (return_type, args);
8239
8240 return args;
8241 }
8242
8243 /* Build a function type. The RETURN_TYPE is the type returned by the
8244 function. If additional arguments are provided, they are
8245 additional argument types. The list of argument types must always
8246 be terminated by NULL_TREE. */
8247
8248 tree
8249 build_function_type_list (tree return_type, ...)
8250 {
8251 tree args;
8252 va_list p;
8253
8254 va_start (p, return_type);
8255 args = build_function_type_list_1 (false, return_type, p);
8256 va_end (p);
8257 return args;
8258 }
8259
8260 /* Build a variable argument function type. The RETURN_TYPE is the
8261 type returned by the function. If additional arguments are provided,
8262 they are additional argument types. The list of argument types must
8263 always be terminated by NULL_TREE. */
8264
8265 tree
8266 build_varargs_function_type_list (tree return_type, ...)
8267 {
8268 tree args;
8269 va_list p;
8270
8271 va_start (p, return_type);
8272 args = build_function_type_list_1 (true, return_type, p);
8273 va_end (p);
8274
8275 return args;
8276 }
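
/* Illustrative sketch (hypothetical variable names): the two convenience
   wrappers above cover the common prototyped and varargs cases, e.g.

     tree f1 = build_function_type_list (void_type_node,
                                         ptr_type_node, integer_type_node,
                                         NULL_TREE);
     tree f2 = build_varargs_function_type_list (integer_type_node,
                                                 integer_type_node,
                                                 NULL_TREE);

   f1 corresponds to "void f (void *, int)" and f2 to "int g (int, ...)";
   for f2 no void_list_node terminator is appended, which is what marks
   the type as taking variable arguments.  */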
8277
8278 /* Build a function type. RETURN_TYPE is the type returned by the
8279 function; VAARGS indicates whether the function takes varargs. The
8280 function takes N named arguments, the types of which are provided in
8281 ARG_TYPES. */
8282
8283 static tree
8284 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8285 tree *arg_types)
8286 {
8287 int i;
8288 tree t = vaargs ? NULL_TREE : void_list_node;
8289
8290 for (i = n - 1; i >= 0; i--)
8291 t = tree_cons (NULL_TREE, arg_types[i], t);
8292
8293 return build_function_type (return_type, t);
8294 }
8295
8296 /* Build a function type. RETURN_TYPE is the type returned by the
8297 function. The function takes N named arguments, the types of which
8298 are provided in ARG_TYPES. */
8299
8300 tree
8301 build_function_type_array (tree return_type, int n, tree *arg_types)
8302 {
8303 return build_function_type_array_1 (false, return_type, n, arg_types);
8304 }
8305
8306 /* Build a variable argument function type. RETURN_TYPE is the type
8307 returned by the function. The function takes N named arguments, the
8308 types of which are provided in ARG_TYPES. */
8309
8310 tree
8311 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8312 {
8313 return build_function_type_array_1 (true, return_type, n, arg_types);
8314 }
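
/* Illustrative sketch (hypothetical variable names): when the parameter
   types are already collected in an array, e.g. by an optimizer cloning a
   function, the array-based builders avoid the va_list dance:

     tree parm_types[2] = { integer_type_node, double_type_node };
     tree fntype = build_function_type_array (integer_type_node,
                                              2, parm_types);

   This is equivalent to build_function_type_list (integer_type_node,
   integer_type_node, double_type_node, NULL_TREE).  */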
8315
8316 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8317 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8318 for the method. An implicit additional parameter (of type
8319 pointer-to-BASETYPE) is added to the ARGTYPES. */
8320
8321 tree
8322 build_method_type_directly (tree basetype,
8323 tree rettype,
8324 tree argtypes)
8325 {
8326 tree t;
8327 tree ptype;
8328 int hashcode = 0;
8329 bool any_structural_p, any_noncanonical_p;
8330 tree canon_argtypes;
8331
8332 /* Make a node of the sort we want. */
8333 t = make_node (METHOD_TYPE);
8334
8335 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8336 TREE_TYPE (t) = rettype;
8337 ptype = build_pointer_type (basetype);
8338
8339 /* The actual arglist for this function includes a "hidden" argument
8340 which is "this". Put it into the list of argument types. */
8341 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8342 TYPE_ARG_TYPES (t) = argtypes;
8343
8344 /* If we already have such a type, use the old one. */
8345 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8346 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8347 hashcode = type_hash_list (argtypes, hashcode);
8348 t = type_hash_canon (hashcode, t);
8349
8350 /* Set up the canonical type. */
8351 any_structural_p
8352 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8353 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8354 any_noncanonical_p
8355 = (TYPE_CANONICAL (basetype) != basetype
8356 || TYPE_CANONICAL (rettype) != rettype);
8357 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8358 &any_structural_p,
8359 &any_noncanonical_p);
8360 if (any_structural_p)
8361 SET_TYPE_STRUCTURAL_EQUALITY (t);
8362 else if (any_noncanonical_p)
8363 TYPE_CANONICAL (t)
8364 = build_method_type_directly (TYPE_CANONICAL (basetype),
8365 TYPE_CANONICAL (rettype),
8366 canon_argtypes);
8367 if (!COMPLETE_TYPE_P (t))
8368 layout_type (t);
8369
8370 return t;
8371 }
8372
8373 /* Construct, lay out and return the type of methods belonging to class
8374 BASETYPE and whose arguments and values are described by TYPE.
8375 If that type exists already, reuse it.
8376 TYPE must be a FUNCTION_TYPE node. */
8377
8378 tree
8379 build_method_type (tree basetype, tree type)
8380 {
8381 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8382
8383 return build_method_type_directly (basetype,
8384 TREE_TYPE (type),
8385 TYPE_ARG_TYPES (type));
8386 }
8387
8388 /* Construct, lay out and return the type of offsets to a value
8389 of type TYPE, within an object of type BASETYPE.
8390 If a suitable offset type exists already, reuse it. */
8391
8392 tree
8393 build_offset_type (tree basetype, tree type)
8394 {
8395 tree t;
8396 hashval_t hashcode = 0;
8397
8398 /* Make a node of the sort we want. */
8399 t = make_node (OFFSET_TYPE);
8400
8401 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8402 TREE_TYPE (t) = type;
8403
8404 /* If we already have such a type, use the old one. */
8405 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8406 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8407 t = type_hash_canon (hashcode, t);
8408
8409 if (!COMPLETE_TYPE_P (t))
8410 layout_type (t);
8411
8412 if (TYPE_CANONICAL (t) == t)
8413 {
8414 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8415 || TYPE_STRUCTURAL_EQUALITY_P (type))
8416 SET_TYPE_STRUCTURAL_EQUALITY (t);
8417 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8418 || TYPE_CANONICAL (type) != type)
8419 TYPE_CANONICAL (t)
8420 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8421 TYPE_CANONICAL (type));
8422 }
8423
8424 return t;
8425 }
8426
8427 /* Create a complex type whose components are COMPONENT_TYPE. */
8428
8429 tree
8430 build_complex_type (tree component_type)
8431 {
8432 tree t;
8433 hashval_t hashcode;
8434
8435 gcc_assert (INTEGRAL_TYPE_P (component_type)
8436 || SCALAR_FLOAT_TYPE_P (component_type)
8437 || FIXED_POINT_TYPE_P (component_type));
8438
8439 /* Make a node of the sort we want. */
8440 t = make_node (COMPLEX_TYPE);
8441
8442 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8443
8444 /* If we already have such a type, use the old one. */
8445 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8446 t = type_hash_canon (hashcode, t);
8447
8448 if (!COMPLETE_TYPE_P (t))
8449 layout_type (t);
8450
8451 if (TYPE_CANONICAL (t) == t)
8452 {
8453 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8454 SET_TYPE_STRUCTURAL_EQUALITY (t);
8455 else if (TYPE_CANONICAL (component_type) != component_type)
8456 TYPE_CANONICAL (t)
8457 = build_complex_type (TYPE_CANONICAL (component_type));
8458 }
8459
8460 /* We need to create a name, since complex is a fundamental type. */
8461 if (! TYPE_NAME (t))
8462 {
8463 const char *name;
8464 if (component_type == char_type_node)
8465 name = "complex char";
8466 else if (component_type == signed_char_type_node)
8467 name = "complex signed char";
8468 else if (component_type == unsigned_char_type_node)
8469 name = "complex unsigned char";
8470 else if (component_type == short_integer_type_node)
8471 name = "complex short int";
8472 else if (component_type == short_unsigned_type_node)
8473 name = "complex short unsigned int";
8474 else if (component_type == integer_type_node)
8475 name = "complex int";
8476 else if (component_type == unsigned_type_node)
8477 name = "complex unsigned int";
8478 else if (component_type == long_integer_type_node)
8479 name = "complex long int";
8480 else if (component_type == long_unsigned_type_node)
8481 name = "complex long unsigned int";
8482 else if (component_type == long_long_integer_type_node)
8483 name = "complex long long int";
8484 else if (component_type == long_long_unsigned_type_node)
8485 name = "complex long long unsigned int";
8486 else
8487 name = 0;
8488
8489 if (name != 0)
8490 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8491 get_identifier (name), t);
8492 }
8493
8494 return build_qualified_type (t, TYPE_QUALS (component_type));
8495 }
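
/* Illustrative sketch (hypothetical variable name): a complex type over a
   plain int component is obtained with

     tree cint = build_complex_type (integer_type_node);

   and, per the table above, is given the TYPE_NAME "complex int".  The
   qualifiers of the component type are propagated to the result by the
   final build_qualified_type call.  */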
8496
8497 /* If TYPE is a real or complex floating-point type and the target
8498 does not directly support arithmetic on TYPE then return the wider
8499 type to be used for arithmetic on TYPE. Otherwise, return
8500 NULL_TREE. */
8501
8502 tree
8503 excess_precision_type (tree type)
8504 {
8505 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8506 {
8507 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8508 switch (TREE_CODE (type))
8509 {
8510 case REAL_TYPE:
8511 switch (flt_eval_method)
8512 {
8513 case 1:
8514 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8515 return double_type_node;
8516 break;
8517 case 2:
8518 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8519 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8520 return long_double_type_node;
8521 break;
8522 default:
8523 gcc_unreachable ();
8524 }
8525 break;
8526 case COMPLEX_TYPE:
8527 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8528 return NULL_TREE;
8529 switch (flt_eval_method)
8530 {
8531 case 1:
8532 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8533 return complex_double_type_node;
8534 break;
8535 case 2:
8536 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8537 || (TYPE_MODE (TREE_TYPE (type))
8538 == TYPE_MODE (double_type_node)))
8539 return complex_long_double_type_node;
8540 break;
8541 default:
8542 gcc_unreachable ();
8543 }
8544 break;
8545 default:
8546 break;
8547 }
8548 }
8549 return NULL_TREE;
8550 }
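
/* Illustrative sketch: on a target whose FLT_EVAL_METHOD is 2 (x87-style
   extended-precision evaluation) and with -fexcess-precision=standard,

     excess_precision_type (float_type_node)

   returns long_double_type_node, telling the front end to carry float
   arithmetic in long double; with -fexcess-precision=fast it returns
   NULL_TREE and no widening is requested.  */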
8551 \f
8552 /* Return OP, stripped of any conversions to wider types as much as is safe.
8553 Converting the value back to OP's type makes a value equivalent to OP.
8554
8555 If FOR_TYPE is nonzero, we return a value which, if converted to
8556 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8557
8558 OP must have integer, real or enumeral type. Pointers are not allowed!
8559
8560 There are some cases where the obvious value we could return
8561 would regenerate to OP if converted to OP's type,
8562 but would not extend like OP to wider types.
8563 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8564 For example, if OP is (unsigned short)(signed char)-1,
8565 we avoid returning (signed char)-1 if FOR_TYPE is int,
8566 even though extending that to an unsigned short would regenerate OP,
8567 since the result of extending (signed char)-1 to (int)
8568 is different from (int) OP. */
8569
8570 tree
8571 get_unwidened (tree op, tree for_type)
8572 {
8573 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8574 tree type = TREE_TYPE (op);
8575 unsigned final_prec
8576 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8577 int uns
8578 = (for_type != 0 && for_type != type
8579 && final_prec > TYPE_PRECISION (type)
8580 && TYPE_UNSIGNED (type));
8581 tree win = op;
8582
8583 while (CONVERT_EXPR_P (op))
8584 {
8585 int bitschange;
8586
8587 /* TYPE_PRECISION on vector types has different meaning
8588 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8589 so avoid them here. */
8590 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8591 break;
8592
8593 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8594 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8595
8596 /* Truncations are many-one so cannot be removed,
8597 unless we are later going to truncate down even further. */
8598 if (bitschange < 0
8599 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8600 break;
8601
8602 /* See what's inside this conversion. If we decide to strip it,
8603 we will set WIN. */
8604 op = TREE_OPERAND (op, 0);
8605
8606 /* If we have not stripped any zero-extensions (uns is 0),
8607 we can strip any kind of extension.
8608 If we have previously stripped a zero-extension,
8609 only zero-extensions can safely be stripped.
8610 Any extension can be stripped if the bits it would produce
8611 are all going to be discarded later by truncating to FOR_TYPE. */
8612
8613 if (bitschange > 0)
8614 {
8615 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8616 win = op;
8617 /* TYPE_UNSIGNED says whether this is a zero-extension.
8618 Let's avoid computing it if it does not affect WIN
8619 and if UNS will not be needed again. */
8620 if ((uns
8621 || CONVERT_EXPR_P (op))
8622 && TYPE_UNSIGNED (TREE_TYPE (op)))
8623 {
8624 uns = 1;
8625 win = op;
8626 }
8627 }
8628 }
8629
8630 /* If we finally reach a constant, see if it fits in FOR_TYPE and,
8631 in that case, convert it. */
8632 if (for_type
8633 && TREE_CODE (win) == INTEGER_CST
8634 && TREE_TYPE (win) != for_type
8635 && int_fits_type_p (win, for_type))
8636 win = fold_convert (for_type, win);
8637
8638 return win;
8639 }
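
/* Worked example for the comment above (hypothetical variables): if C is a
   signed char object and OP is the implicit promotion (int) C, then

     get_unwidened (op, integer_type_node)

   strips the widening and returns C itself, because sign-extending C back
   to int regenerates OP.  For OP = (unsigned short) (signed char) -1 with
   FOR_TYPE = integer_type_node, the inner (signed char) -1 is *not*
   returned, as extending it to int would give -1 rather than 65535.  */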
8640 \f
8641 /* Return OP or a simpler expression for a narrower value
8642 which can be sign-extended or zero-extended to give back OP.
8643 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8644 or 0 if the value should be sign-extended. */
8645
8646 tree
8647 get_narrower (tree op, int *unsignedp_ptr)
8648 {
8649 int uns = 0;
8650 int first = 1;
8651 tree win = op;
8652 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8653
8654 while (TREE_CODE (op) == NOP_EXPR)
8655 {
8656 int bitschange
8657 = (TYPE_PRECISION (TREE_TYPE (op))
8658 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8659
8660 /* Truncations are many-one so cannot be removed. */
8661 if (bitschange < 0)
8662 break;
8663
8664 /* See what's inside this conversion. If we decide to strip it,
8665 we will set WIN. */
8666
8667 if (bitschange > 0)
8668 {
8669 op = TREE_OPERAND (op, 0);
8670 /* An extension: the outermost one can be stripped,
8671 but remember whether it is zero or sign extension. */
8672 if (first)
8673 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8674 /* Otherwise, if a sign extension has been stripped,
8675 only sign extensions can now be stripped;
8676 if a zero extension has been stripped, only zero-extensions. */
8677 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8678 break;
8679 first = 0;
8680 }
8681 else /* bitschange == 0 */
8682 {
8683 /* A change in nominal type can always be stripped, but we must
8684 preserve the unsignedness. */
8685 if (first)
8686 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8687 first = 0;
8688 op = TREE_OPERAND (op, 0);
8689 /* Keep trying to narrow, but don't assign op to win if it
8690 would turn an integral type into something else. */
8691 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8692 continue;
8693 }
8694
8695 win = op;
8696 }
8697
8698 if (TREE_CODE (op) == COMPONENT_REF
8699 /* Since type_for_size always gives an integer type. */
8700 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8701 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8702 /* Ensure field is laid out already. */
8703 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8704 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8705 {
8706 unsigned HOST_WIDE_INT innerprec
8707 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8708 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8709 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8710 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8711
8712 /* We can get this structure field in a narrower type that fits it,
8713 but the resulting extension to its nominal type (a fullword type)
8714 must satisfy the same conditions as for other extensions.
8715
8716 Do this only for fields that are aligned (not bit-fields),
8717 because when bit-field insns are used there is no
8718 advantage in doing this. */
8719
8720 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8721 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8722 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8723 && type != 0)
8724 {
8725 if (first)
8726 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8727 win = fold_convert (type, op);
8728 }
8729 }
8730
8731 *unsignedp_ptr = uns;
8732 return win;
8733 }
8734 \f
8735 /* Returns true if integer constant C has a value that is permissible
8736 for type TYPE (an INTEGER_TYPE). */
8737
8738 bool
8739 int_fits_type_p (const_tree c, const_tree type)
8740 {
8741 tree type_low_bound, type_high_bound;
8742 bool ok_for_low_bound, ok_for_high_bound;
8743 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8744
8745 retry:
8746 type_low_bound = TYPE_MIN_VALUE (type);
8747 type_high_bound = TYPE_MAX_VALUE (type);
8748
8749 /* If at least one bound of the type is a constant integer, we can check
8750 ourselves and maybe make a decision. If no such decision is possible, but
8751 this type is a subtype, try checking against that. Otherwise, use
8752 fits_to_tree_p, which checks against the precision.
8753
8754 Compute the status for each possibly constant bound, and return if we see
8755 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8756 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8757 for "constant known to fit". */
8758
8759 /* Check if c >= type_low_bound. */
8760 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8761 {
8762 if (INT_CST_LT (c, type_low_bound))
8763 return false;
8764 ok_for_low_bound = true;
8765 }
8766 else
8767 ok_for_low_bound = false;
8768
8769 /* Check if c <= type_high_bound. */
8770 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8771 {
8772 if (INT_CST_LT (type_high_bound, c))
8773 return false;
8774 ok_for_high_bound = true;
8775 }
8776 else
8777 ok_for_high_bound = false;
8778
8779 /* If the constant fits both bounds, the result is known. */
8780 if (ok_for_low_bound && ok_for_high_bound)
8781 return true;
8782
8783 /* Perform some generic filtering which may allow making a decision
8784 even if the bounds are not constant. First, negative integers
8785 never fit in unsigned types. */
8786 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8787 return false;
8788
8789 /* Second, narrower types always fit in wider ones. */
8790 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8791 return true;
8792
8793 /* Third, unsigned integers with top bit set never fit signed types. */
8794 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED && wi::neg_p (c))
8795 return false;
8796
8797 /* If we haven't been able to decide at this point, there is nothing more we
8798 can check ourselves here. Look at the base type if we have one and it
8799 has the same precision. */
8800 if (TREE_CODE (type) == INTEGER_TYPE
8801 && TREE_TYPE (type) != 0
8802 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8803 {
8804 type = TREE_TYPE (type);
8805 goto retry;
8806 }
8807
8808 /* Or to fits_to_tree_p, if nothing else. */
8809 return wi::fits_to_tree_p (c, type);
8810 }
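
/* Worked example (hypothetical variable names): checking whether the
   constant 300 is representable as an unsigned char,

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   yields false, because 300 exceeds the constant TYPE_MAX_VALUE of 255;
   the same call with 200 yields true via the bounds checks above.  */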
8811
8812 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8813 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8814 represented (assuming two's-complement arithmetic) within the bit
8815 precision of the type are returned instead. */
8816
8817 void
8818 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8819 {
8820 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8821 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8822 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8823 else
8824 {
8825 if (TYPE_UNSIGNED (type))
8826 mpz_set_ui (min, 0);
8827 else
8828 {
8829 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8830 wi::to_mpz (mn, min, SIGNED);
8831 }
8832 }
8833
8834 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8835 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8836 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8837 else
8838 {
8839 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8840 wi::to_mpz (mn, max, TYPE_SIGN (type));
8841 }
8842 }
8843
8844 /* Return true if VAR is an automatic variable defined in function FN. */
8845
8846 bool
8847 auto_var_in_fn_p (const_tree var, const_tree fn)
8848 {
8849 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8850 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8851 || TREE_CODE (var) == PARM_DECL)
8852 && ! TREE_STATIC (var))
8853 || TREE_CODE (var) == LABEL_DECL
8854 || TREE_CODE (var) == RESULT_DECL));
8855 }
8856
8857 /* Subprogram of following function. Called by walk_tree.
8858
8859 Return *TP if it is an automatic variable or parameter of the
8860 function passed in as DATA. */
8861
8862 static tree
8863 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8864 {
8865 tree fn = (tree) data;
8866
8867 if (TYPE_P (*tp))
8868 *walk_subtrees = 0;
8869
8870 else if (DECL_P (*tp)
8871 && auto_var_in_fn_p (*tp, fn))
8872 return *tp;
8873
8874 return NULL_TREE;
8875 }
8876
8877 /* Returns true if TYPE is, contains, or refers to a type with variable
8878 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8879 arguments, but not the return type. If FN is nonzero, only return
8880 true if a modifier of the type or position of FN is a variable or
8881 parameter inside FN.
8882
8883 This concept is more general than that of C99 'variably modified types':
8884 in C99, a struct type is never variably modified because a VLA may not
8885 appear as a structure member. However, in GNU C, code like:
8886
8887 struct S { int i[f()]; };
8888
8889 is valid, and other languages may define similar constructs. */
8890
8891 bool
8892 variably_modified_type_p (tree type, tree fn)
8893 {
8894 tree t;
8895
8896 /* Test if T is either variable (if FN is zero) or an expression containing
8897 a variable in FN. If TYPE isn't gimplified, return true also if
8898 gimplify_one_sizepos would gimplify the expression into a local
8899 variable. */
8900 #define RETURN_TRUE_IF_VAR(T) \
8901 do { tree _t = (T); \
8902 if (_t != NULL_TREE \
8903 && _t != error_mark_node \
8904 && TREE_CODE (_t) != INTEGER_CST \
8905 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8906 && (!fn \
8907 || (!TYPE_SIZES_GIMPLIFIED (type) \
8908 && !is_gimple_sizepos (_t)) \
8909 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8910 return true; } while (0)
8911
8912 if (type == error_mark_node)
8913 return false;
8914
8915 /* If TYPE itself has variable size, it is variably modified. */
8916 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8917 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8918
8919 switch (TREE_CODE (type))
8920 {
8921 case POINTER_TYPE:
8922 case REFERENCE_TYPE:
8923 case VECTOR_TYPE:
8924 if (variably_modified_type_p (TREE_TYPE (type), fn))
8925 return true;
8926 break;
8927
8928 case FUNCTION_TYPE:
8929 case METHOD_TYPE:
8930 /* If TYPE is a function type, it is variably modified if the
8931 return type is variably modified. */
8932 if (variably_modified_type_p (TREE_TYPE (type), fn))
8933 return true;
8934 break;
8935
8936 case INTEGER_TYPE:
8937 case REAL_TYPE:
8938 case FIXED_POINT_TYPE:
8939 case ENUMERAL_TYPE:
8940 case BOOLEAN_TYPE:
8941 /* Scalar types are variably modified if their end points
8942 aren't constant. */
8943 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8944 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8945 break;
8946
8947 case RECORD_TYPE:
8948 case UNION_TYPE:
8949 case QUAL_UNION_TYPE:
8950 /* We can't see if any of the fields are variably-modified by the
8951 definition we normally use, since that would produce infinite
8952 recursion via pointers. */
8953 /* This is variably modified if some field's type is. */
8954 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8955 if (TREE_CODE (t) == FIELD_DECL)
8956 {
8957 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8958 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8959 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8960
8961 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8962 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8963 }
8964 break;
8965
8966 case ARRAY_TYPE:
8967 /* Do not call ourselves to avoid infinite recursion. This is
8968 variably modified if the element type is. */
8969 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8970 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8971 break;
8972
8973 default:
8974 break;
8975 }
8976
8977 /* The current language may have other cases to check, but in general,
8978 all other types are not variably modified. */
8979 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8980
8981 #undef RETURN_TRUE_IF_VAR
8982 }
8983
8984 /* Given a DECL or TYPE, return the scope in which it was declared, or
8985 NULL_TREE if there is no containing scope. */
8986
8987 tree
8988 get_containing_scope (const_tree t)
8989 {
8990 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8991 }
8992
8993 /* Return the innermost context enclosing DECL that is
8994 a FUNCTION_DECL, or zero if none. */
8995
8996 tree
8997 decl_function_context (const_tree decl)
8998 {
8999 tree context;
9000
9001 if (TREE_CODE (decl) == ERROR_MARK)
9002 return 0;
9003
9004 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9005 where we look up the function at runtime. Such functions always take
9006 a first argument of type 'pointer to real context'.
9007
9008 C++ should really be fixed to use DECL_CONTEXT for the real context,
9009 and use something else for the "virtual context". */
9010 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9011 context
9012 = TYPE_MAIN_VARIANT
9013 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9014 else
9015 context = DECL_CONTEXT (decl);
9016
9017 while (context && TREE_CODE (context) != FUNCTION_DECL)
9018 {
9019 if (TREE_CODE (context) == BLOCK)
9020 context = BLOCK_SUPERCONTEXT (context);
9021 else
9022 context = get_containing_scope (context);
9023 }
9024
9025 return context;
9026 }
9027
9028 /* Return the innermost context enclosing DECL that is
9029 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9030 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9031
9032 tree
9033 decl_type_context (const_tree decl)
9034 {
9035 tree context = DECL_CONTEXT (decl);
9036
9037 while (context)
9038 switch (TREE_CODE (context))
9039 {
9040 case NAMESPACE_DECL:
9041 case TRANSLATION_UNIT_DECL:
9042 return NULL_TREE;
9043
9044 case RECORD_TYPE:
9045 case UNION_TYPE:
9046 case QUAL_UNION_TYPE:
9047 return context;
9048
9049 case TYPE_DECL:
9050 case FUNCTION_DECL:
9051 context = DECL_CONTEXT (context);
9052 break;
9053
9054 case BLOCK:
9055 context = BLOCK_SUPERCONTEXT (context);
9056 break;
9057
9058 default:
9059 gcc_unreachable ();
9060 }
9061
9062 return NULL_TREE;
9063 }
9064
9065 /* CALL is a CALL_EXPR. Return the declaration for the function
9066 called, or NULL_TREE if the called function cannot be
9067 determined. */
9068
9069 tree
9070 get_callee_fndecl (const_tree call)
9071 {
9072 tree addr;
9073
9074 if (call == error_mark_node)
9075 return error_mark_node;
9076
9077 /* It's invalid to call this function with anything but a
9078 CALL_EXPR. */
9079 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9080
9081 /* The first operand to the CALL is the address of the function
9082 called. */
9083 addr = CALL_EXPR_FN (call);
9084
9085 STRIP_NOPS (addr);
9086
9087 /* If this is a readonly function pointer, extract its initial value. */
9088 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9089 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9090 && DECL_INITIAL (addr))
9091 addr = DECL_INITIAL (addr);
9092
9093 /* If the address is just `&f' for some function `f', then we know
9094 that `f' is being called. */
9095 if (TREE_CODE (addr) == ADDR_EXPR
9096 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9097 return TREE_OPERAND (addr, 0);
9098
9099 /* We couldn't figure out what was being called. */
9100 return NULL_TREE;
9101 }
9102
9103 /* Print debugging information about tree nodes generated during the compile,
9104 and any language-specific information. */
9105
9106 void
9107 dump_tree_statistics (void)
9108 {
9109 if (GATHER_STATISTICS)
9110 {
9111 int i;
9112 int total_nodes, total_bytes;
9113 fprintf (stderr, "Kind Nodes Bytes\n");
9114 fprintf (stderr, "---------------------------------------\n");
9115 total_nodes = total_bytes = 0;
9116 for (i = 0; i < (int) all_kinds; i++)
9117 {
9118 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9119 tree_node_counts[i], tree_node_sizes[i]);
9120 total_nodes += tree_node_counts[i];
9121 total_bytes += tree_node_sizes[i];
9122 }
9123 fprintf (stderr, "---------------------------------------\n");
9124 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9125 fprintf (stderr, "---------------------------------------\n");
9126 fprintf (stderr, "Code Nodes\n");
9127 fprintf (stderr, "----------------------------\n");
9128 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9129 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9130 tree_code_counts[i]);
9131 fprintf (stderr, "----------------------------\n");
9132 ssanames_print_statistics ();
9133 phinodes_print_statistics ();
9134 }
9135 else
9136 fprintf (stderr, "(No per-node statistics)\n");
9137
9138 print_type_hash_statistics ();
9139 print_debug_expr_statistics ();
9140 print_value_expr_statistics ();
9141 lang_hooks.print_statistics ();
9142 }
9143 \f
9144 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9145
9146 /* Fold the most-significant BITS bits of VALUE into the crc32 CHKSUM. */
9147
9148 static unsigned
9149 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9150 {
9151 unsigned ix;
9152
9153 for (ix = bits; ix--; value <<= 1)
9154 {
9155 unsigned feedback;
9156
9157 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9158 chksum <<= 1;
9159 chksum ^= feedback;
9160 }
9161 return chksum;
9162 }
9163
9164 /* Generate a crc32 of a 32-bit unsigned. */
9165
9166 unsigned
9167 crc32_unsigned (unsigned chksum, unsigned value)
9168 {
9169 return crc32_unsigned_bits (chksum, value, 32);
9170 }
9171
9172 /* Generate a crc32 of a byte. */
9173
9174 unsigned
9175 crc32_byte (unsigned chksum, char byte)
9176 {
9177 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9178 }
9179
9180 /* Generate a crc32 of a string. */
9181
9182 unsigned
9183 crc32_string (unsigned chksum, const char *string)
9184 {
9185 do
9186 {
9187 chksum = crc32_byte (chksum, *string);
9188 }
9189 while (*string++);
9190 return chksum;
9191 }
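
/* Illustrative sketch: crc32_string folds each byte of the string,
   including the terminating NUL, into the running checksum, so

     unsigned chk = crc32_string (0, "foo");

   is equivalent to four crc32_byte calls ('f', 'o', 'o', '\0') starting
   from 0.  get_file_function_name below uses exactly this to mix a global
   object name into a link-unique symbol.  */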
9192
9193 /* P is a string that will be used in a symbol. Mask out any characters
9194 that are not valid in that context. */
9195
9196 void
9197 clean_symbol_name (char *p)
9198 {
9199 for (; *p; p++)
9200 if (! (ISALNUM (*p)
9201 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9202 || *p == '$'
9203 #endif
9204 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9205 || *p == '.'
9206 #endif
9207 ))
9208 *p = '_';
9209 }
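
/* Illustrative sketch: on a target where neither '$' nor '.' is allowed
   in labels (both NO_DOLLAR_IN_LABEL and NO_DOT_IN_LABEL defined),

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   leaves buf holding "foo_bar_c"; on targets that do allow '.', the dot
   is kept and only the '-' is replaced.  */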
9210
9211 /* Generate a name for a special-purpose function.
9212 The generated name may need to be unique across the whole link.
9213 Changes to this function may also require corresponding changes to
9214 xstrdup_mask_random.
9215 TYPE is some string to identify the purpose of this function to the
9216 linker or collect2; it must start with an uppercase letter,
9217 one of:
9218 I - for constructors
9219 D - for destructors
9220 N - for C++ anonymous namespaces
9221 F - for DWARF unwind frame information. */
9222
9223 tree
9224 get_file_function_name (const char *type)
9225 {
9226 char *buf;
9227 const char *p;
9228 char *q;
9229
9230 /* If we already have a name we know to be unique, just use that. */
9231 if (first_global_object_name)
9232 p = q = ASTRDUP (first_global_object_name);
9233 /* If the target is handling the constructors/destructors, they
9234 will be local to this file and the name is only necessary for
9235 debugging purposes.
9236 We also assign sub_I and sub_D suffixes to constructors called from
9237 the global static constructors. These are always local. */
9238 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9239 || (strncmp (type, "sub_", 4) == 0
9240 && (type[4] == 'I' || type[4] == 'D')))
9241 {
9242 const char *file = main_input_filename;
9243 if (! file)
9244 file = input_filename;
9245 /* Just use the file's basename, because the full pathname
9246 might be quite long. */
9247 p = q = ASTRDUP (lbasename (file));
9248 }
9249 else
9250 {
9251 /* Otherwise, the name must be unique across the entire link.
9252 We don't have anything that we know to be unique to this translation
9253 unit, so use what we do have and throw in some randomness. */
9254 unsigned len;
9255 const char *name = weak_global_object_name;
9256 const char *file = main_input_filename;
9257
9258 if (! name)
9259 name = "";
9260 if (! file)
9261 file = input_filename;
9262
9263 len = strlen (file);
9264 q = (char *) alloca (9 + 17 + len + 1);
9265 memcpy (q, file, len + 1);
9266
9267 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9268 crc32_string (0, name), get_random_seed (false));
9269
9270 p = q;
9271 }
9272
9273 clean_symbol_name (q);
9274 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9275 + strlen (type));
9276
9277 /* Set up the name of the file-level functions we may need.
9278 Use a global object (which is already required to be unique over
9279 the program) rather than the file name (which imposes extra
9280 constraints). */
9281 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9282
9283 return get_identifier (buf);
9284 }
9285 \f
9286 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9287
9288 /* Complain that the tree code of NODE does not match the expected 0
9289 terminated list of trailing codes. The trailing code list can be
9290 empty, for a more vague error message. FILE, LINE, and FUNCTION
9291 are of the caller. */
9292
9293 void
9294 tree_check_failed (const_tree node, const char *file,
9295 int line, const char *function, ...)
9296 {
9297 va_list args;
9298 const char *buffer;
9299 unsigned length = 0;
9300 enum tree_code code;
9301
9302 va_start (args, function);
9303 while ((code = (enum tree_code) va_arg (args, int)))
9304 length += 4 + strlen (get_tree_code_name (code));
9305 va_end (args);
9306 if (length)
9307 {
9308 char *tmp;
9309 va_start (args, function);
9310 length += strlen ("expected ");
9311 buffer = tmp = (char *) alloca (length);
9312 length = 0;
9313 while ((code = (enum tree_code) va_arg (args, int)))
9314 {
9315 const char *prefix = length ? " or " : "expected ";
9316
9317 strcpy (tmp + length, prefix);
9318 length += strlen (prefix);
9319 strcpy (tmp + length, get_tree_code_name (code));
9320 length += strlen (get_tree_code_name (code));
9321 }
9322 va_end (args);
9323 }
9324 else
9325 buffer = "unexpected node";
9326
9327 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9328 buffer, get_tree_code_name (TREE_CODE (node)),
9329 function, trim_filename (file), line);
9330 }
9331
9332 /* Complain that the tree code of NODE does match the expected 0
9333 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9334 the caller. */
9335
9336 void
9337 tree_not_check_failed (const_tree node, const char *file,
9338 int line, const char *function, ...)
9339 {
9340 va_list args;
9341 char *buffer;
9342 unsigned length = 0;
9343 enum tree_code code;
9344
9345 va_start (args, function);
9346 while ((code = (enum tree_code) va_arg (args, int)))
9347 length += 4 + strlen (get_tree_code_name (code));
9348 va_end (args);
9349 va_start (args, function);
9350 buffer = (char *) alloca (length);
9351 length = 0;
9352 while ((code = (enum tree_code) va_arg (args, int)))
9353 {
9354 if (length)
9355 {
9356 strcpy (buffer + length, " or ");
9357 length += 4;
9358 }
9359 strcpy (buffer + length, get_tree_code_name (code));
9360 length += strlen (get_tree_code_name (code));
9361 }
9362 va_end (args);
9363
9364 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9365 buffer, get_tree_code_name (TREE_CODE (node)),
9366 function, trim_filename (file), line);
9367 }
9368
9369 /* Similar to tree_check_failed, except that we check for a class of tree
9370 code, given in CL. */
9371
9372 void
9373 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9374 const char *file, int line, const char *function)
9375 {
9376 internal_error
9377 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9378 TREE_CODE_CLASS_STRING (cl),
9379 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9380 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9381 }
9382
9383 /* Similar to tree_check_failed, except that instead of specifying a
9384 dozen codes, use the knowledge that they're all sequential. */
9385
9386 void
9387 tree_range_check_failed (const_tree node, const char *file, int line,
9388 const char *function, enum tree_code c1,
9389 enum tree_code c2)
9390 {
9391 char *buffer;
9392 unsigned length = 0;
9393 unsigned int c;
9394
9395 for (c = c1; c <= c2; ++c)
9396 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9397
9398 length += strlen ("expected ");
9399 buffer = (char *) alloca (length);
9400 length = 0;
9401
9402 for (c = c1; c <= c2; ++c)
9403 {
9404 const char *prefix = length ? " or " : "expected ";
9405
9406 strcpy (buffer + length, prefix);
9407 length += strlen (prefix);
9408 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9409 length += strlen (get_tree_code_name ((enum tree_code) c));
9410 }
9411
9412 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9413 buffer, get_tree_code_name (TREE_CODE (node)),
9414 function, trim_filename (file), line);
9415 }
9416
9417
9418 /* Similar to tree_check_failed, except that we check that a tree does
9419 not belong to the specified class, given in CL. */
9420
9421 void
9422 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9423 const char *file, int line, const char *function)
9424 {
9425 internal_error
9426 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9427 TREE_CODE_CLASS_STRING (cl),
9428 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9429 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9430 }
9431
9432
9433 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9434
9435 void
9436 omp_clause_check_failed (const_tree node, const char *file, int line,
9437 const char *function, enum omp_clause_code code)
9438 {
9439 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9440 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9441 function, trim_filename (file), line);
9442 }
9443
9444
9445 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9446
9447 void
9448 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9449 const char *function, enum omp_clause_code c1,
9450 enum omp_clause_code c2)
9451 {
9452 char *buffer;
9453 unsigned length = 0;
9454 unsigned int c;
9455
9456 for (c = c1; c <= c2; ++c)
9457 length += 4 + strlen (omp_clause_code_name[c]);
9458
9459 length += strlen ("expected ");
9460 buffer = (char *) alloca (length);
9461 length = 0;
9462
9463 for (c = c1; c <= c2; ++c)
9464 {
9465 const char *prefix = length ? " or " : "expected ";
9466
9467 strcpy (buffer + length, prefix);
9468 length += strlen (prefix);
9469 strcpy (buffer + length, omp_clause_code_name[c]);
9470 length += strlen (omp_clause_code_name[c]);
9471 }
9472
9473 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9474 buffer, omp_clause_code_name[TREE_CODE (node)],
9475 function, trim_filename (file), line);
9476 }
9477
9478
9479 #undef DEFTREESTRUCT
9480 #define DEFTREESTRUCT(VAL, NAME) NAME,
9481
9482 static const char *ts_enum_names[] = {
9483 #include "treestruct.def"
9484 };
9485 #undef DEFTREESTRUCT
9486
9487 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9488
9489 /* Similar to tree_class_check_failed, except that we check for
9490 whether CODE contains the tree structure identified by EN. */
9491
9492 void
9493 tree_contains_struct_check_failed (const_tree node,
9494 const enum tree_node_structure_enum en,
9495 const char *file, int line,
9496 const char *function)
9497 {
9498 internal_error
9499 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9500 TS_ENUM_NAME (en),
9501 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9502 }
9503
9504
9505 /* Similar to above, except that the check is for the bounds of the
9506 (dynamically sized) element array of a TREE_INT_CST. */
9507
9508 void
9509 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9510 const char *function)
9511 {
9512 internal_error
9513 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9514 idx + 1, len, function, trim_filename (file), line);
9515 }
9516
9517 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9518 (dynamically sized) vector. */
9519
9520 void
9521 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9522 const char *function)
9523 {
9524 internal_error
9525 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9526 idx + 1, len, function, trim_filename (file), line);
9527 }
9528
9529 /* Similar to above, except that the check is for the bounds of the operand
9530 vector of an expression node EXP. */
9531
9532 void
9533 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9534 int line, const char *function)
9535 {
9536 enum tree_code code = TREE_CODE (exp);
9537 internal_error
9538 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9539 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9540 function, trim_filename (file), line);
9541 }
9542
9543 /* Similar to above, except that the check is for the number of
9544 operands of an OMP_CLAUSE node. */
9545
9546 void
9547 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9548 int line, const char *function)
9549 {
9550 internal_error
9551 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9552 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9553 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9554 trim_filename (file), line);
9555 }
9556 #endif /* ENABLE_TREE_CHECKING */
9557 \f
9558 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9559 and mapped to the machine mode MODE. Initialize its fields and build
9560 the information necessary for debugging output. */
9561
9562 static tree
9563 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9564 {
9565 tree t;
9566 hashval_t hashcode = 0;
9567
9568 t = make_node (VECTOR_TYPE);
9569 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9570 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9571 SET_TYPE_MODE (t, mode);
9572
9573 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9574 SET_TYPE_STRUCTURAL_EQUALITY (t);
9575 else if (TYPE_CANONICAL (innertype) != innertype
9576 || mode != VOIDmode)
9577 TYPE_CANONICAL (t)
9578 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9579
9580 layout_type (t);
9581
9582 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9583 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9584 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9585 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9586 t = type_hash_canon (hashcode, t);
9587
9588 /* We have built a main variant, based on the main variant of the
9589 inner type. Use it to build the variant we return. */
9590 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9591 && TREE_TYPE (t) != innertype)
9592 return build_type_attribute_qual_variant (t,
9593 TYPE_ATTRIBUTES (innertype),
9594 TYPE_QUALS (innertype));
9595
9596 return t;
9597 }
9598
9599 static tree
9600 make_or_reuse_type (unsigned size, int unsignedp)
9601 {
9602 if (size == INT_TYPE_SIZE)
9603 return unsignedp ? unsigned_type_node : integer_type_node;
9604 if (size == CHAR_TYPE_SIZE)
9605 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9606 if (size == SHORT_TYPE_SIZE)
9607 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9608 if (size == LONG_TYPE_SIZE)
9609 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9610 if (size == LONG_LONG_TYPE_SIZE)
9611 return (unsignedp ? long_long_unsigned_type_node
9612 : long_long_integer_type_node);
9613 if (size == 128 && int128_integer_type_node)
9614 return (unsignedp ? int128_unsigned_type_node
9615 : int128_integer_type_node);
9616
9617 if (unsignedp)
9618 return make_unsigned_type (size);
9619 else
9620 return make_signed_type (size);
9621 }
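
/* Illustrative sketch, not part of the original file: how the helper above
   behaves for a couple of sizes.  The concrete numbers assume a target
   where INT_TYPE_SIZE is 32.

     tree a = make_or_reuse_type (32, 1);   -- returns unsigned_type_node
     tree b = make_or_reuse_type (24, 0);   -- no standard C width matches,
                                               so make_signed_type (24)
                                               builds a fresh node

   Only sizes that match none of the standard C widths allocate new type
   nodes; everything else reuses the shared globals set up below.  */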
9622
9623 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9624
9625 static tree
9626 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9627 {
9628 if (satp)
9629 {
9630 if (size == SHORT_FRACT_TYPE_SIZE)
9631 return unsignedp ? sat_unsigned_short_fract_type_node
9632 : sat_short_fract_type_node;
9633 if (size == FRACT_TYPE_SIZE)
9634 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9635 if (size == LONG_FRACT_TYPE_SIZE)
9636 return unsignedp ? sat_unsigned_long_fract_type_node
9637 : sat_long_fract_type_node;
9638 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9639 return unsignedp ? sat_unsigned_long_long_fract_type_node
9640 : sat_long_long_fract_type_node;
9641 }
9642 else
9643 {
9644 if (size == SHORT_FRACT_TYPE_SIZE)
9645 return unsignedp ? unsigned_short_fract_type_node
9646 : short_fract_type_node;
9647 if (size == FRACT_TYPE_SIZE)
9648 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9649 if (size == LONG_FRACT_TYPE_SIZE)
9650 return unsignedp ? unsigned_long_fract_type_node
9651 : long_fract_type_node;
9652 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9653 return unsignedp ? unsigned_long_long_fract_type_node
9654 : long_long_fract_type_node;
9655 }
9656
9657 return make_fract_type (size, unsignedp, satp);
9658 }
9659
9660 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9661
9662 static tree
9663 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9664 {
9665 if (satp)
9666 {
9667 if (size == SHORT_ACCUM_TYPE_SIZE)
9668 return unsignedp ? sat_unsigned_short_accum_type_node
9669 : sat_short_accum_type_node;
9670 if (size == ACCUM_TYPE_SIZE)
9671 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9672 if (size == LONG_ACCUM_TYPE_SIZE)
9673 return unsignedp ? sat_unsigned_long_accum_type_node
9674 : sat_long_accum_type_node;
9675 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9676 return unsignedp ? sat_unsigned_long_long_accum_type_node
9677 : sat_long_long_accum_type_node;
9678 }
9679 else
9680 {
9681 if (size == SHORT_ACCUM_TYPE_SIZE)
9682 return unsignedp ? unsigned_short_accum_type_node
9683 : short_accum_type_node;
9684 if (size == ACCUM_TYPE_SIZE)
9685 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9686 if (size == LONG_ACCUM_TYPE_SIZE)
9687 return unsignedp ? unsigned_long_accum_type_node
9688 : long_accum_type_node;
9689 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9690 return unsignedp ? unsigned_long_long_accum_type_node
9691 : long_long_accum_type_node;
9692 }
9693
9694 return make_accum_type (size, unsignedp, satp);
9695 }
9696
9697 /* Create nodes for all integer types (and error_mark_node) using the sizes
9698 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9699 SHORT_DOUBLE specifies whether double should be of the same precision
9700 as float. */
9701
9702 void
9703 build_common_tree_nodes (bool signed_char, bool short_double)
9704 {
9705 error_mark_node = make_node (ERROR_MARK);
9706 TREE_TYPE (error_mark_node) = error_mark_node;
9707
9708 initialize_sizetypes ();
9709
9710 /* Define both `signed char' and `unsigned char'. */
9711 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9712 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9713 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9714 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9715
9716 /* Define `char', which is like either `signed char' or `unsigned char'
9717 but not the same as either. */
9718 char_type_node
9719 = (signed_char
9720 ? make_signed_type (CHAR_TYPE_SIZE)
9721 : make_unsigned_type (CHAR_TYPE_SIZE));
9722 TYPE_STRING_FLAG (char_type_node) = 1;
9723
9724 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9725 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9726 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9727 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9728 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9729 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9730 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9731 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9732 #if HOST_BITS_PER_WIDE_INT >= 64
9733 /* TODO: This isn't correct, but the logic currently depends on the
9734 host's wide integers instead of the target's.
9735 If there is a target that does not support TImode but does have a
9736 128-bit integer-scalar register, this target check needs to be adjusted. */
9737 if (targetm.scalar_mode_supported_p (TImode))
9738 {
9739 int128_integer_type_node = make_signed_type (128);
9740 int128_unsigned_type_node = make_unsigned_type (128);
9741 }
9742 #endif
9743
9744 /* Define a boolean type. This type only represents boolean values but
9745 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9746 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9747 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9748 TYPE_PRECISION (boolean_type_node) = 1;
9749 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9750
9751 /* Define what type to use for size_t. */
9752 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9753 size_type_node = unsigned_type_node;
9754 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9755 size_type_node = long_unsigned_type_node;
9756 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9757 size_type_node = long_long_unsigned_type_node;
9758 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9759 size_type_node = short_unsigned_type_node;
9760 else
9761 gcc_unreachable ();
9762
9763 /* Fill in the rest of the sized types. Reuse existing type nodes
9764 when possible. */
9765 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9766 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9767 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9768 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9769 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9770
9771 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9772 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9773 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9774 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9775 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9776
9777 access_public_node = get_identifier ("public");
9778 access_protected_node = get_identifier ("protected");
9779 access_private_node = get_identifier ("private");
9780
9781 /* Define these next since types below may use them. */
9782 integer_zero_node = build_int_cst (integer_type_node, 0);
9783 integer_one_node = build_int_cst (integer_type_node, 1);
9784 integer_three_node = build_int_cst (integer_type_node, 3);
9785 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9786
9787 size_zero_node = size_int (0);
9788 size_one_node = size_int (1);
9789 bitsize_zero_node = bitsize_int (0);
9790 bitsize_one_node = bitsize_int (1);
9791 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9792
9793 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9794 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9795
9796 void_type_node = make_node (VOID_TYPE);
9797 layout_type (void_type_node);
9798
9799 pointer_bounds_type_node = targetm.chkp_bound_type ();
9800
9801 /* We are not going to have real types in C with less than byte alignment,
9802 so we might as well not have any types that claim to have it. */
9803 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9804 TYPE_USER_ALIGN (void_type_node) = 0;
9805
9806 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9807 layout_type (TREE_TYPE (null_pointer_node));
9808
9809 ptr_type_node = build_pointer_type (void_type_node);
9810 const_ptr_type_node
9811 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9812 fileptr_type_node = ptr_type_node;
9813
9814 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9815
9816 float_type_node = make_node (REAL_TYPE);
9817 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9818 layout_type (float_type_node);
9819
9820 double_type_node = make_node (REAL_TYPE);
9821 if (short_double)
9822 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9823 else
9824 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9825 layout_type (double_type_node);
9826
9827 long_double_type_node = make_node (REAL_TYPE);
9828 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9829 layout_type (long_double_type_node);
9830
9831 float_ptr_type_node = build_pointer_type (float_type_node);
9832 double_ptr_type_node = build_pointer_type (double_type_node);
9833 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9834 integer_ptr_type_node = build_pointer_type (integer_type_node);
9835
9836 /* Fixed size integer types. */
9837 uint16_type_node = build_nonstandard_integer_type (16, true);
9838 uint32_type_node = build_nonstandard_integer_type (32, true);
9839 uint64_type_node = build_nonstandard_integer_type (64, true);
9840
9841 /* Decimal float types. */
9842 dfloat32_type_node = make_node (REAL_TYPE);
9843 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9844 layout_type (dfloat32_type_node);
9845 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9846 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9847
9848 dfloat64_type_node = make_node (REAL_TYPE);
9849 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9850 layout_type (dfloat64_type_node);
9851 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9852 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9853
9854 dfloat128_type_node = make_node (REAL_TYPE);
9855 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9856 layout_type (dfloat128_type_node);
9857 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9858 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9859
9860 complex_integer_type_node = build_complex_type (integer_type_node);
9861 complex_float_type_node = build_complex_type (float_type_node);
9862 complex_double_type_node = build_complex_type (double_type_node);
9863 complex_long_double_type_node = build_complex_type (long_double_type_node);
9864
9865 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9866 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9867 sat_ ## KIND ## _type_node = \
9868 make_sat_signed_ ## KIND ## _type (SIZE); \
9869 sat_unsigned_ ## KIND ## _type_node = \
9870 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9871 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9872 unsigned_ ## KIND ## _type_node = \
9873 make_unsigned_ ## KIND ## _type (SIZE);
9874
9875 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9876 sat_ ## WIDTH ## KIND ## _type_node = \
9877 make_sat_signed_ ## KIND ## _type (SIZE); \
9878 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9879 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9880 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9881 unsigned_ ## WIDTH ## KIND ## _type_node = \
9882 make_unsigned_ ## KIND ## _type (SIZE);
9883
9884 /* Make fixed-point type nodes based on four different widths. */
9885 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9886 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9887 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9888 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9889 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9890
9891 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9892 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9893 NAME ## _type_node = \
9894 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9895 u ## NAME ## _type_node = \
9896 make_or_reuse_unsigned_ ## KIND ## _type \
9897 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9898 sat_ ## NAME ## _type_node = \
9899 make_or_reuse_sat_signed_ ## KIND ## _type \
9900 (GET_MODE_BITSIZE (MODE ## mode)); \
9901 sat_u ## NAME ## _type_node = \
9902 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9903 (GET_MODE_BITSIZE (U ## MODE ## mode));
9904
9905 /* Fixed-point type and mode nodes. */
9906 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9907 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9908 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9909 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9910 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9911 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9912 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9913 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9914 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9915 MAKE_FIXED_MODE_NODE (accum, da, DA)
9916 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9917
9918 {
9919 tree t = targetm.build_builtin_va_list ();
9920
9921 /* Many back-ends define record types without setting TYPE_NAME.
9922 If we copied the record type here, we'd keep the original
9923 record type without a name. This breaks name mangling. So,
9924 don't copy record types and let c_common_nodes_and_builtins()
9925 declare the type to be __builtin_va_list. */
9926 if (TREE_CODE (t) != RECORD_TYPE)
9927 t = build_variant_type_copy (t);
9928
9929 va_list_type_node = t;
9930 }
9931 }
9932
9933 /* Modify DECL for given flags.
9934 TM_PURE attribute is set only on types, so the function will modify
9935 DECL's type when ECF_TM_PURE is used. */
9936
9937 void
9938 set_call_expr_flags (tree decl, int flags)
9939 {
9940 if (flags & ECF_NOTHROW)
9941 TREE_NOTHROW (decl) = 1;
9942 if (flags & ECF_CONST)
9943 TREE_READONLY (decl) = 1;
9944 if (flags & ECF_PURE)
9945 DECL_PURE_P (decl) = 1;
9946 if (flags & ECF_LOOPING_CONST_OR_PURE)
9947 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9948 if (flags & ECF_NOVOPS)
9949 DECL_IS_NOVOPS (decl) = 1;
9950 if (flags & ECF_NORETURN)
9951 TREE_THIS_VOLATILE (decl) = 1;
9952 if (flags & ECF_MALLOC)
9953 DECL_IS_MALLOC (decl) = 1;
9954 if (flags & ECF_RETURNS_TWICE)
9955 DECL_IS_RETURNS_TWICE (decl) = 1;
9956 if (flags & ECF_LEAF)
9957 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9958 NULL, DECL_ATTRIBUTES (decl));
9959 if ((flags & ECF_TM_PURE) && flag_tm)
9960 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9961 /* Looping const or pure is implied by noreturn.
9962 There is currently no way to declare looping const or looping pure alone. */
9963 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9964 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9965 }
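
/* Illustrative sketch, not part of the original file: a front end declaring
   a hypothetical abort-like DECL that never returns and never throws could
   pass

     set_call_expr_flags (decl, ECF_NORETURN | ECF_NOTHROW | ECF_LEAF);

   which, per the tests above, sets TREE_THIS_VOLATILE and TREE_NOTHROW on
   DECL and attaches the "leaf" attribute to DECL_ATTRIBUTES.  */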
9966
9967
9968 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9969
9970 static void
9971 local_define_builtin (const char *name, tree type, enum built_in_function code,
9972 const char *library_name, int ecf_flags)
9973 {
9974 tree decl;
9975
9976 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9977 library_name, NULL_TREE);
9978 set_call_expr_flags (decl, ecf_flags);
9979
9980 set_builtin_decl (code, decl, true);
9981 }
9982
9983 /* Call this function after instantiating all builtins that the language
9984 front end cares about. This will build the rest of the builtins that
9985 are relied upon by the tree optimizers and the middle-end. */
9986
9987 void
9988 build_common_builtin_nodes (void)
9989 {
9990 tree tmp, ftype;
9991 int ecf_flags;
9992
9993 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9994 {
9995 ftype = build_function_type (void_type_node, void_list_node);
9996 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9997 "__builtin_unreachable",
9998 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9999 | ECF_CONST);
10000 }
10001
10002 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10003 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10004 {
10005 ftype = build_function_type_list (ptr_type_node,
10006 ptr_type_node, const_ptr_type_node,
10007 size_type_node, NULL_TREE);
10008
10009 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10010 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10011 "memcpy", ECF_NOTHROW | ECF_LEAF);
10012 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10013 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10014 "memmove", ECF_NOTHROW | ECF_LEAF);
10015 }
10016
10017 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10018 {
10019 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10020 const_ptr_type_node, size_type_node,
10021 NULL_TREE);
10022 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10023 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10024 }
10025
10026 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10027 {
10028 ftype = build_function_type_list (ptr_type_node,
10029 ptr_type_node, integer_type_node,
10030 size_type_node, NULL_TREE);
10031 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10032 "memset", ECF_NOTHROW | ECF_LEAF);
10033 }
10034
10035 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10036 {
10037 ftype = build_function_type_list (ptr_type_node,
10038 size_type_node, NULL_TREE);
10039 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10040 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10041 }
10042
10043 ftype = build_function_type_list (ptr_type_node, size_type_node,
10044 size_type_node, NULL_TREE);
10045 local_define_builtin ("__builtin_alloca_with_align", ftype,
10046 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10047 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10048
10049 /* If we're checking the stack, `alloca' can throw. */
10050 if (flag_stack_check)
10051 {
10052 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10053 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10054 }
10055
10056 ftype = build_function_type_list (void_type_node,
10057 ptr_type_node, ptr_type_node,
10058 ptr_type_node, NULL_TREE);
10059 local_define_builtin ("__builtin_init_trampoline", ftype,
10060 BUILT_IN_INIT_TRAMPOLINE,
10061 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10062 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10063 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10064 "__builtin_init_heap_trampoline",
10065 ECF_NOTHROW | ECF_LEAF);
10066
10067 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10068 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10069 BUILT_IN_ADJUST_TRAMPOLINE,
10070 "__builtin_adjust_trampoline",
10071 ECF_CONST | ECF_NOTHROW);
10072
10073 ftype = build_function_type_list (void_type_node,
10074 ptr_type_node, ptr_type_node, NULL_TREE);
10075 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10076 BUILT_IN_NONLOCAL_GOTO,
10077 "__builtin_nonlocal_goto",
10078 ECF_NORETURN | ECF_NOTHROW);
10079
10080 ftype = build_function_type_list (void_type_node,
10081 ptr_type_node, ptr_type_node, NULL_TREE);
10082 local_define_builtin ("__builtin_setjmp_setup", ftype,
10083 BUILT_IN_SETJMP_SETUP,
10084 "__builtin_setjmp_setup", ECF_NOTHROW);
10085
10086 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10087 local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
10088 BUILT_IN_SETJMP_DISPATCHER,
10089 "__builtin_setjmp_dispatcher",
10090 ECF_PURE | ECF_NOTHROW);
10091
10092 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10093 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10094 BUILT_IN_SETJMP_RECEIVER,
10095 "__builtin_setjmp_receiver", ECF_NOTHROW);
10096
10097 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10098 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10099 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10100
10101 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10102 local_define_builtin ("__builtin_stack_restore", ftype,
10103 BUILT_IN_STACK_RESTORE,
10104 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10105
10106 /* If there's a possibility that we might use the ARM EABI, build the
10107 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10108 if (targetm.arm_eabi_unwinder)
10109 {
10110 ftype = build_function_type_list (void_type_node, NULL_TREE);
10111 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10112 BUILT_IN_CXA_END_CLEANUP,
10113 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10114 }
10115
10116 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10117 local_define_builtin ("__builtin_unwind_resume", ftype,
10118 BUILT_IN_UNWIND_RESUME,
10119 ((targetm_common.except_unwind_info (&global_options)
10120 == UI_SJLJ)
10121 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10122 ECF_NORETURN);
10123
10124 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10125 {
10126 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10127 NULL_TREE);
10128 local_define_builtin ("__builtin_return_address", ftype,
10129 BUILT_IN_RETURN_ADDRESS,
10130 "__builtin_return_address",
10131 ECF_NOTHROW);
10132 }
10133
10134 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10135 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10136 {
10137 ftype = build_function_type_list (void_type_node, ptr_type_node,
10138 ptr_type_node, NULL_TREE);
10139 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10140 local_define_builtin ("__cyg_profile_func_enter", ftype,
10141 BUILT_IN_PROFILE_FUNC_ENTER,
10142 "__cyg_profile_func_enter", 0);
10143 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10144 local_define_builtin ("__cyg_profile_func_exit", ftype,
10145 BUILT_IN_PROFILE_FUNC_EXIT,
10146 "__cyg_profile_func_exit", 0);
10147 }
10148
10149 /* The exception object and filter values from the runtime. The argument
10150 must be zero before exception lowering, i.e. from the front end. After
10151 exception lowering, it will be the region number for the exception
10152 landing pad. These functions are PURE instead of CONST to prevent
10153 them from being hoisted past the exception edge that will initialize
10154 their values in the landing pad. */
10155 ftype = build_function_type_list (ptr_type_node,
10156 integer_type_node, NULL_TREE);
10157 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10158 /* Only use TM_PURE if we have TM language support. */
10159 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10160 ecf_flags |= ECF_TM_PURE;
10161 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10162 "__builtin_eh_pointer", ecf_flags);
10163
10164 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10165 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10166 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10167 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10168
10169 ftype = build_function_type_list (void_type_node,
10170 integer_type_node, integer_type_node,
10171 NULL_TREE);
10172 local_define_builtin ("__builtin_eh_copy_values", ftype,
10173 BUILT_IN_EH_COPY_VALUES,
10174 "__builtin_eh_copy_values", ECF_NOTHROW);
10175
10176 /* Complex multiplication and division. These are handled as builtins
10177 rather than optabs because emit_library_call_value doesn't support
10178 complex. Further, we can do slightly better with folding these
10179 beasties if the real and imaginary parts of the arguments are separate. */
10180 {
10181 int mode;
10182
10183 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10184 {
10185 char mode_name_buf[4], *q;
10186 const char *p;
10187 enum built_in_function mcode, dcode;
10188 tree type, inner_type;
10189 const char *prefix = "__";
10190
10191 if (targetm.libfunc_gnu_prefix)
10192 prefix = "__gnu_";
10193
10194 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10195 if (type == NULL)
10196 continue;
10197 inner_type = TREE_TYPE (type);
10198
10199 ftype = build_function_type_list (type, inner_type, inner_type,
10200 inner_type, inner_type, NULL_TREE);
10201
10202 mcode = ((enum built_in_function)
10203 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10204 dcode = ((enum built_in_function)
10205 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10206
10207 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10208 *q = TOLOWER (*p);
10209 *q = '\0';
10210
10211 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10212 NULL);
10213 local_define_builtin (built_in_names[mcode], ftype, mcode,
10214 built_in_names[mcode],
10215 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10216
10217 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10218 NULL);
10219 local_define_builtin (built_in_names[dcode], ftype, dcode,
10220 built_in_names[dcode],
10221 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10222 }
10223 }
10224 }
10225
10226 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10227 better way.
10228
10229 If we requested a pointer to a vector, build up the pointers that
10230 we stripped off while looking for the inner type. Similarly for
10231 return values from functions.
10232
10233 The argument TYPE is the top of the chain, and BOTTOM is the
10234 new type which we will point to. */
10235
10236 tree
10237 reconstruct_complex_type (tree type, tree bottom)
10238 {
10239 tree inner, outer;
10240
10241 if (TREE_CODE (type) == POINTER_TYPE)
10242 {
10243 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10244 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10245 TYPE_REF_CAN_ALIAS_ALL (type));
10246 }
10247 else if (TREE_CODE (type) == REFERENCE_TYPE)
10248 {
10249 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10250 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10251 TYPE_REF_CAN_ALIAS_ALL (type));
10252 }
10253 else if (TREE_CODE (type) == ARRAY_TYPE)
10254 {
10255 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10256 outer = build_array_type (inner, TYPE_DOMAIN (type));
10257 }
10258 else if (TREE_CODE (type) == FUNCTION_TYPE)
10259 {
10260 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10261 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10262 }
10263 else if (TREE_CODE (type) == METHOD_TYPE)
10264 {
10265 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10266 /* The build_method_type_directly() routine prepends 'this' to the argument
10267 list, so we must compensate by getting rid of it. */
10268 outer
10269 = build_method_type_directly
10270 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10271 inner,
10272 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10273 }
10274 else if (TREE_CODE (type) == OFFSET_TYPE)
10275 {
10276 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10277 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10278 }
10279 else
10280 return bottom;
10281
10282 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10283 TYPE_QUALS (type));
10284 }
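
/* Illustrative sketch, not part of the original file: for a hypothetical
   TYPE of pointer-to-float and a vector BOTTOM, the recursion above
   rebuilds the stripped pointer layer around the new inner type:

     tree v4sf = build_vector_type (float_type_node, 4);
     tree p    = reconstruct_complex_type (float_ptr_type_node, v4sf);

   P is a pointer to the vector type, with the original pointer type's
   qualifiers and attributes reapplied by build_type_attribute_qual_variant.  */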
10285
10286 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10287 the inner type. */
10288 tree
10289 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10290 {
10291 int nunits;
10292
10293 switch (GET_MODE_CLASS (mode))
10294 {
10295 case MODE_VECTOR_INT:
10296 case MODE_VECTOR_FLOAT:
10297 case MODE_VECTOR_FRACT:
10298 case MODE_VECTOR_UFRACT:
10299 case MODE_VECTOR_ACCUM:
10300 case MODE_VECTOR_UACCUM:
10301 nunits = GET_MODE_NUNITS (mode);
10302 break;
10303
10304 case MODE_INT:
10305 /* Check that there are no leftover bits. */
10306 gcc_assert (GET_MODE_BITSIZE (mode)
10307 % tree_to_hwi (TYPE_SIZE (innertype)) == 0);
10308
10309 nunits = GET_MODE_BITSIZE (mode)
10310 / tree_to_hwi (TYPE_SIZE (innertype));
10311 break;
10312
10313 default:
10314 gcc_unreachable ();
10315 }
10316
10317 return make_vector_type (innertype, nunits, mode);
10318 }
10319
10320 /* Similarly, but takes the inner type and number of units, which must be
10321 a power of two. */
10322
10323 tree
10324 build_vector_type (tree innertype, int nunits)
10325 {
10326 return make_vector_type (innertype, nunits, VOIDmode);
10327 }
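
/* Illustrative sketch, not part of the original file: the common way to
   request a vector of four floats; layout_type inside make_vector_type
   then picks the machine mode:

     tree v4sf = build_vector_type (float_type_node, 4);
*/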
10328
10329 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10330
10331 tree
10332 build_opaque_vector_type (tree innertype, int nunits)
10333 {
10334 tree t = make_vector_type (innertype, nunits, VOIDmode);
10335 tree cand;
10336 /* We always build the non-opaque variant before the opaque one,
10337 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10338 cand = TYPE_NEXT_VARIANT (t);
10339 if (cand
10340 && TYPE_VECTOR_OPAQUE (cand)
10341 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10342 return cand;
10343 /* Otherwise build a variant type and make sure to queue it after
10344 the non-opaque type. */
10345 cand = build_distinct_type_copy (t);
10346 TYPE_VECTOR_OPAQUE (cand) = true;
10347 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10348 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10349 TYPE_NEXT_VARIANT (t) = cand;
10350 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10351 return cand;
10352 }
10353
10354
10355 /* Given an initializer INIT, return TRUE if INIT is zero or some
10356 aggregate of zeros. Otherwise return FALSE. */
10357 bool
10358 initializer_zerop (const_tree init)
10359 {
10360 tree elt;
10361
10362 STRIP_NOPS (init);
10363
10364 switch (TREE_CODE (init))
10365 {
10366 case INTEGER_CST:
10367 return integer_zerop (init);
10368
10369 case REAL_CST:
10370 /* ??? Note that this is not correct for C4X float formats. There,
10371 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10372 negative exponent. */
10373 return real_zerop (init)
10374 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10375
10376 case FIXED_CST:
10377 return fixed_zerop (init);
10378
10379 case COMPLEX_CST:
10380 return integer_zerop (init)
10381 || (real_zerop (init)
10382 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10383 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10384
10385 case VECTOR_CST:
10386 {
10387 unsigned i;
10388 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10389 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10390 return false;
10391 return true;
10392 }
10393
10394 case CONSTRUCTOR:
10395 {
10396 unsigned HOST_WIDE_INT idx;
10397
10398 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10399 if (!initializer_zerop (elt))
10400 return false;
10401 return true;
10402 }
10403
10404 case STRING_CST:
10405 {
10406 int i;
10407
10408 /* We need to loop through all elements to handle cases like
10409 "\0" and "\0foobar". */
10410 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10411 if (TREE_STRING_POINTER (init)[i] != '\0')
10412 return false;
10413
10414 return true;
10415 }
10416
10417 default:
10418 return false;
10419 }
10420 }
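
/* Illustrative sketch, not part of the original file: a plain zero constant
   and an all-zero CONSTRUCTOR are both accepted by the predicate above,
   while a REAL_CST of -0.0 is rejected by the REAL_VALUE_MINUS_ZERO test:

     tree z = build_int_cst (integer_type_node, 0);
     -- initializer_zerop (z) returns true
*/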
10421
10422 /* Check whether vector VEC consists entirely of equal elements and
10423 that the number of elements corresponds to the type of VEC.
10424 The function returns the first element of the vector
10425 or NULL_TREE if the vector is not uniform. */
10426 tree
10427 uniform_vector_p (const_tree vec)
10428 {
10429 tree first, t;
10430 unsigned i;
10431
10432 if (vec == NULL_TREE)
10433 return NULL_TREE;
10434
10435 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10436
10437 if (TREE_CODE (vec) == VECTOR_CST)
10438 {
10439 first = VECTOR_CST_ELT (vec, 0);
10440 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10441 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10442 return NULL_TREE;
10443
10444 return first;
10445 }
10446
10447 else if (TREE_CODE (vec) == CONSTRUCTOR)
10448 {
10449 first = error_mark_node;
10450
10451 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10452 {
10453 if (i == 0)
10454 {
10455 first = t;
10456 continue;
10457 }
10458 if (!operand_equal_p (first, t, 0))
10459 return NULL_TREE;
10460 }
10461 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10462 return NULL_TREE;
10463
10464 return first;
10465 }
10466
10467 return NULL_TREE;
10468 }
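
/* Illustrative sketch, not part of the original file: a VECTOR_CST built by
   splatting one element is reported as uniform:

     tree vtype = build_vector_type (integer_type_node, 4);
     tree vec   = build_vector_from_val (vtype,
                                         build_int_cst (integer_type_node, 7));
     tree elt   = uniform_vector_p (vec);   -- the INTEGER_CST 7

   A CONSTRUCTOR with any differing element, or with fewer elements than
   TYPE_VECTOR_SUBPARTS, yields NULL_TREE instead.  */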
10469
10470 /* Build an empty statement at location LOC. */
10471
10472 tree
10473 build_empty_stmt (location_t loc)
10474 {
10475 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10476 SET_EXPR_LOCATION (t, loc);
10477 return t;
10478 }
10479
10480
10481 /* Build an OpenMP clause with code CODE. LOC is the location of the
10482 clause. */
10483
10484 tree
10485 build_omp_clause (location_t loc, enum omp_clause_code code)
10486 {
10487 tree t;
10488 int size, length;
10489
10490 length = omp_clause_num_ops[code];
10491 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10492
10493 record_node_allocation_statistics (OMP_CLAUSE, size);
10494
10495 t = ggc_alloc_tree_node (size);
10496 memset (t, 0, size);
10497 TREE_SET_CODE (t, OMP_CLAUSE);
10498 OMP_CLAUSE_SET_CODE (t, code);
10499 OMP_CLAUSE_LOCATION (t) = loc;
10500
10501 return t;
10502 }
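
/* Illustrative sketch, not part of the original file: the typical pattern a
   front end uses to build a clause and chain it onto an existing list
   (VAR, LOC and CLAUSES are hypothetical locals):

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;
*/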
10503
10504 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10505 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10506 Except for the CODE and operand count field, other storage for the
10507 object is initialized to zeros. */
10508
10509 tree
10510 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10511 {
10512 tree t;
10513 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10514
10515 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10516 gcc_assert (len >= 1);
10517
10518 record_node_allocation_statistics (code, length);
10519
10520 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10521
10522 TREE_SET_CODE (t, code);
10523
10524 /* Can't use TREE_OPERAND to store the length because if checking is
10525 enabled, it will try to check the length before we store it. :-P */
10526 t->exp.operands[0] = build_int_cst (sizetype, len);
10527
10528 return t;
10529 }
10530
10531 /* Helper function for build_call_* functions; build a CALL_EXPR with
10532 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10533 the argument slots. */
10534
10535 static tree
10536 build_call_1 (tree return_type, tree fn, int nargs)
10537 {
10538 tree t;
10539
10540 t = build_vl_exp (CALL_EXPR, nargs + 3);
10541 TREE_TYPE (t) = return_type;
10542 CALL_EXPR_FN (t) = fn;
10543 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10544
10545 return t;
10546 }
10547
10548 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10549 FN and a null static chain slot. NARGS is the number of call arguments
10550 which are specified as "..." arguments. */
10551
10552 tree
10553 build_call_nary (tree return_type, tree fn, int nargs, ...)
10554 {
10555 tree ret;
10556 va_list args;
10557 va_start (args, nargs);
10558 ret = build_call_valist (return_type, fn, nargs, args);
10559 va_end (args);
10560 return ret;
10561 }
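
/* Illustrative sketch, not part of the original file: building a call to a
   hypothetical two-argument FNDECL through the varargs interface above:

     tree fn   = build_fold_addr_expr (fndecl);
     tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fndecl)), fn, 2,
                                  arg0, arg1);

   The resulting CALL_EXPR has a null static chain and its argument slots
   filled in by build_call_valist.  */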
10562
10563 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10564 FN and a null static chain slot. NARGS is the number of call arguments
10565 which are specified as a va_list ARGS. */
10566
10567 tree
10568 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10569 {
10570 tree t;
10571 int i;
10572
10573 t = build_call_1 (return_type, fn, nargs);
10574 for (i = 0; i < nargs; i++)
10575 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10576 process_call_operands (t);
10577 return t;
10578 }
10579
10580 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10581 FN and a null static chain slot. NARGS is the number of call arguments
10582 which are specified as a tree array ARGS. */
10583
10584 tree
10585 build_call_array_loc (location_t loc, tree return_type, tree fn,
10586 int nargs, const tree *args)
10587 {
10588 tree t;
10589 int i;
10590
10591 t = build_call_1 (return_type, fn, nargs);
10592 for (i = 0; i < nargs; i++)
10593 CALL_EXPR_ARG (t, i) = args[i];
10594 process_call_operands (t);
10595 SET_EXPR_LOCATION (t, loc);
10596 return t;
10597 }
10598
10599 /* Like build_call_array, but takes a vec. */
10600
10601 tree
10602 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10603 {
10604 tree ret, t;
10605 unsigned int ix;
10606
10607 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10608 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10609 CALL_EXPR_ARG (ret, ix) = t;
10610 process_call_operands (ret);
10611 return ret;
10612 }
10613
10614
10615 /* Returns true if it is possible to prove that the index of
10616 an array access REF (an ARRAY_REF expression) falls into the
10617 array bounds. */
10618
10619 bool
10620 in_array_bounds_p (tree ref)
10621 {
10622 tree idx = TREE_OPERAND (ref, 1);
10623 tree min, max;
10624
10625 if (TREE_CODE (idx) != INTEGER_CST)
10626 return false;
10627
10628 min = array_ref_low_bound (ref);
10629 max = array_ref_up_bound (ref);
10630 if (!min
10631 || !max
10632 || TREE_CODE (min) != INTEGER_CST
10633 || TREE_CODE (max) != INTEGER_CST)
10634 return false;
10635
10636 if (tree_int_cst_lt (idx, min)
10637 || tree_int_cst_lt (max, idx))
10638 return false;
10639
10640 return true;
10641 }
10642
10643 /* Returns true if it is possible to prove that the range of
10644 an array access REF (an ARRAY_RANGE_REF expression) falls
10645 into the array bounds. */
10646
10647 bool
10648 range_in_array_bounds_p (tree ref)
10649 {
10650 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
10651 tree range_min, range_max, min, max;
10652
10653 range_min = TYPE_MIN_VALUE (domain_type);
10654 range_max = TYPE_MAX_VALUE (domain_type);
10655 if (!range_min
10656 || !range_max
10657 || TREE_CODE (range_min) != INTEGER_CST
10658 || TREE_CODE (range_max) != INTEGER_CST)
10659 return false;
10660
10661 min = array_ref_low_bound (ref);
10662 max = array_ref_up_bound (ref);
10663 if (!min
10664 || !max
10665 || TREE_CODE (min) != INTEGER_CST
10666 || TREE_CODE (max) != INTEGER_CST)
10667 return false;
10668
10669 if (tree_int_cst_lt (range_min, min)
10670 || tree_int_cst_lt (max, range_max))
10671 return false;
10672
10673 return true;
10674 }
10675
10676 /* Return true if T (assumed to be a DECL) must be assigned a memory
10677 location. */
10678
10679 bool
10680 needs_to_live_in_memory (const_tree t)
10681 {
10682 return (TREE_ADDRESSABLE (t)
10683 || is_global_var (t)
10684 || (TREE_CODE (t) == RESULT_DECL
10685 && !DECL_BY_REFERENCE (t)
10686 && aggregate_value_p (t, current_function_decl)));
10687 }
10688
10689 /* Return the value of the constant X, sign-extended. */
10690
10691 HOST_WIDE_INT
10692 int_cst_value (const_tree x)
10693 {
10694 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10695 unsigned HOST_WIDE_INT val = tree_to_hwi (x);
10696
10697 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10698 gcc_assert (cst_fits_shwi_p (x));
10699
10700 if (bits < HOST_BITS_PER_WIDE_INT)
10701 {
10702 bool negative = ((val >> (bits - 1)) & 1) != 0;
10703 if (negative)
10704 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10705 else
10706 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10707 }
10708
10709 return val;
10710 }
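
/* Illustrative sketch, not part of the original file: for an 8-bit constant
   the function above propagates bit 7 as the sign bit, regardless of the
   signedness of the tree type:

     tree c = build_int_cst (unsigned_char_type_node, 0xff);
     -- int_cst_value (c) == -1, while 0x7f stays 127
*/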
10711
10712 /* Return the value of the constant X, sign-extended to a HOST_WIDEST_INT. */
10713
10714 HOST_WIDEST_INT
10715 widest_int_cst_value (const_tree x)
10716 {
10717 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10718 unsigned HOST_WIDEST_INT val = tree_to_hwi (x);
10719
10720 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10721 gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
10722 gcc_assert (TREE_INT_CST_NUNITS (x) <= 2);
10723
10724 if (TREE_INT_CST_NUNITS (x) == 1)
10725 val = HOST_WIDE_INT (val);
10726 else
10727 val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_ELT (x, 1))
10728 << HOST_BITS_PER_WIDE_INT);
10729 #else
10730 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10731 gcc_assert (TREE_INT_CST_NUNITS (x) == 1);
10732 #endif
10733
10734 if (bits < HOST_BITS_PER_WIDEST_INT)
10735 {
10736 bool negative = ((val >> (bits - 1)) & 1) != 0;
10737 if (negative)
10738 val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
10739 else
10740 val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
10741 }
10742
10743 return val;
10744 }
10745
10746 /* If TYPE is an integral or pointer type, return an integer type with
10747 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10748 if TYPE is already an integer type of signedness UNSIGNEDP. */
10749
10750 tree
10751 signed_or_unsigned_type_for (int unsignedp, tree type)
10752 {
10753 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10754 return type;
10755
10756 if (TREE_CODE (type) == VECTOR_TYPE)
10757 {
10758 tree inner = TREE_TYPE (type);
10759 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10760 if (!inner2)
10761 return NULL_TREE;
10762 if (inner == inner2)
10763 return type;
10764 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10765 }
10766
10767 if (!INTEGRAL_TYPE_P (type)
10768 && !POINTER_TYPE_P (type))
10769 return NULL_TREE;
10770
10771 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10772 }
10773
10774 /* If TYPE is an integral or pointer type, return an integer type with
10775 the same precision which is unsigned, or itself if TYPE is already an
10776 unsigned integer type. */
10777
10778 tree
10779 unsigned_type_for (tree type)
10780 {
10781 return signed_or_unsigned_type_for (1, type);
10782 }
10783
10784 /* If TYPE is an integral or pointer type, return an integer type with
10785 the same precision which is signed, or itself if TYPE is already a
10786 signed integer type. */
10787
10788 tree
10789 signed_type_for (tree type)
10790 {
10791 return signed_or_unsigned_type_for (0, type);
10792 }
10793
10794 /* If TYPE is a vector type, return a signed integer vector type with the
10795 same width and number of subparts. Otherwise return boolean_type_node. */
10796
10797 tree
10798 truth_type_for (tree type)
10799 {
10800 if (TREE_CODE (type) == VECTOR_TYPE)
10801 {
10802 tree elem = lang_hooks.types.type_for_size
10803 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10804 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10805 }
10806 else
10807 return boolean_type_node;
10808 }
10809
10810 /* Returns the largest value obtainable by casting something in INNER type to
10811 OUTER type. */
10812
10813 tree
10814 upper_bound_in_type (tree outer, tree inner)
10815 {
10816 unsigned int det = 0;
10817 unsigned oprec = TYPE_PRECISION (outer);
10818 unsigned iprec = TYPE_PRECISION (inner);
10819 unsigned prec;
10820
10821 /* Compute a unique number for every combination. */
10822 det |= (oprec > iprec) ? 4 : 0;
10823 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10824 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10825
10826 /* Determine the exponent to use. */
10827 switch (det)
10828 {
10829 case 0:
10830 case 1:
10831 /* oprec <= iprec, outer: signed, inner: don't care. */
10832 prec = oprec - 1;
10833 break;
10834 case 2:
10835 case 3:
10836 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10837 prec = oprec;
10838 break;
10839 case 4:
10840 /* oprec > iprec, outer: signed, inner: signed. */
10841 prec = iprec - 1;
10842 break;
10843 case 5:
10844 /* oprec > iprec, outer: signed, inner: unsigned. */
10845 prec = iprec;
10846 break;
10847 case 6:
10848 /* oprec > iprec, outer: unsigned, inner: signed. */
10849 prec = oprec;
10850 break;
10851 case 7:
10852 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10853 prec = iprec;
10854 break;
10855 default:
10856 gcc_unreachable ();
10857 }
10858
10859 return wide_int_to_tree (outer,
10860 wi::mask (prec, false, TYPE_PRECISION (outer)));
10861 }
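
/* Illustrative sketch, not part of the original file: with OUTER a 32-bit
   unsigned type and INNER a 16-bit signed type, det is 4 | 2 | 0 == 6, so
   prec == oprec == 32 and the result is the all-ones mask 0xffffffff --
   casting a negative 16-bit value to the wider unsigned type can reach any
   32-bit value.  */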
10862
10863 /* Returns the smallest value obtainable by casting something in INNER type to
10864 OUTER type. */
10865
10866 tree
10867 lower_bound_in_type (tree outer, tree inner)
10868 {
10869 unsigned oprec = TYPE_PRECISION (outer);
10870 unsigned iprec = TYPE_PRECISION (inner);
10871
10872 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10873 and obtain 0. */
10874 if (TYPE_UNSIGNED (outer)
10875 /* If we are widening something of an unsigned type, OUTER type
10876 contains all values of INNER type. In particular, both INNER
10877 and OUTER types have zero in common. */
10878 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10879 return build_int_cst (outer, 0);
10880 else
10881 {
10882 /* If we are widening a signed type to another signed type, we
10883 want to obtain -2^(iprec-1). If we are keeping the
10884 precision or narrowing to a signed type, we want to obtain
10885 -2^(oprec-1). */
10886 unsigned prec = oprec > iprec ? iprec : oprec;
10887 return wide_int_to_tree (outer,
10888 wi::mask (prec - 1, true,
10889 TYPE_PRECISION (outer)));
10890 }
10891 }
10892
10893 /* Return nonzero if two operands that are suitable for PHI nodes are
10894 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10895 SSA_NAME or invariant. Note that this is strictly an optimization.
10896 That is, callers of this function can directly call operand_equal_p
10897 and get the same result, only slower. */
10898
10899 int
10900 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10901 {
10902 if (arg0 == arg1)
10903 return 1;
10904 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10905 return 0;
10906 return operand_equal_p (arg0, arg1, 0);
10907 }
10908
10909 /* Returns number of zeros at the end of binary representation of X. */
10910
10911 tree
10912 num_ending_zeros (const_tree x)
10913 {
10914 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10915 }
10916
10917
10918 #define WALK_SUBTREE(NODE) \
10919 do \
10920 { \
10921 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10922 if (result) \
10923 return result; \
10924 } \
10925 while (0)
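
/* Illustrative sketch, not part of the original file: the shape of a
   typical walk_tree_fn callback that the machinery below invokes on every
   subtree.  This one counts SSA_NAMEs and never prunes the walk:

     static tree
     count_ssa_names_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         (*(unsigned *) data)++;
       return NULL_TREE;    -- NULL keeps the traversal going
     }

   It would be passed to walk_tree or walk_tree_without_duplicates.  */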
10926
10927 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10928 to be walked whenever a type is seen in the tree. The rest of the operands
10929 and the return value are as for walk_tree. */
10930
10931 static tree
10932 walk_type_fields (tree type, walk_tree_fn func, void *data,
10933 struct pointer_set_t *pset, walk_tree_lh lh)
10934 {
10935 tree result = NULL_TREE;
10936
10937 switch (TREE_CODE (type))
10938 {
10939 case POINTER_TYPE:
10940 case REFERENCE_TYPE:
10941 /* We have to worry about mutually recursive pointers. These can't
10942 be written in C. They can in Ada. It's pathological, but
10943 there's an ACATS test (c38102a) that checks it. Deal with this
10944 by checking if we're pointing to another pointer, that one
10945 points to another pointer, that one does too, and we have no htab.
10946 If so, get a hash table. We check three levels deep to avoid
10947 the cost of the hash table if we don't need one. */
10948 if (POINTER_TYPE_P (TREE_TYPE (type))
10949 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10950 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10951 && !pset)
10952 {
10953 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10954 func, data);
10955 if (result)
10956 return result;
10957
10958 break;
10959 }
10960
10961 /* ... fall through ... */
10962
10963 case COMPLEX_TYPE:
10964 WALK_SUBTREE (TREE_TYPE (type));
10965 break;
10966
10967 case METHOD_TYPE:
10968 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10969
10970 /* Fall through. */
10971
10972 case FUNCTION_TYPE:
10973 WALK_SUBTREE (TREE_TYPE (type));
10974 {
10975 tree arg;
10976
10977 /* We never want to walk into default arguments. */
10978 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10979 WALK_SUBTREE (TREE_VALUE (arg));
10980 }
10981 break;
10982
10983 case ARRAY_TYPE:
10984 /* Don't follow this node's type if it is a pointer, for fear that
10985 we'll have infinite recursion. If we have a PSET, then we
10986 need not fear. */
10987 if (pset
10988 || (!POINTER_TYPE_P (TREE_TYPE (type))
10989 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10990 WALK_SUBTREE (TREE_TYPE (type));
10991 WALK_SUBTREE (TYPE_DOMAIN (type));
10992 break;
10993
10994 case OFFSET_TYPE:
10995 WALK_SUBTREE (TREE_TYPE (type));
10996 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10997 break;
10998
10999 default:
11000 break;
11001 }
11002
11003 return NULL_TREE;
11004 }
11005
11006 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11007 called with the DATA and the address of each sub-tree. If FUNC returns a
11008 non-NULL value, the traversal is stopped, and the value returned by FUNC
11009 is returned. If PSET is non-NULL it is used to record the nodes visited,
11010 and to avoid visiting a node more than once. */
11011
11012 tree
11013 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11014 struct pointer_set_t *pset, walk_tree_lh lh)
11015 {
11016 enum tree_code code;
11017 int walk_subtrees;
11018 tree result;
11019
11020 #define WALK_SUBTREE_TAIL(NODE) \
11021 do \
11022 { \
11023 tp = & (NODE); \
11024 goto tail_recurse; \
11025 } \
11026 while (0)
11027
11028 tail_recurse:
11029 /* Skip empty subtrees. */
11030 if (!*tp)
11031 return NULL_TREE;
11032
11033 /* Don't walk the same tree twice, if the user has requested
11034 that we avoid doing so. */
11035 if (pset && pointer_set_insert (pset, *tp))
11036 return NULL_TREE;
11037
11038 /* Call the function. */
11039 walk_subtrees = 1;
11040 result = (*func) (tp, &walk_subtrees, data);
11041
11042 /* If we found something, return it. */
11043 if (result)
11044 return result;
11045
11046 code = TREE_CODE (*tp);
11047
11048 /* Even if we didn't, FUNC may have decided that there was nothing
11049 interesting below this point in the tree. */
11050 if (!walk_subtrees)
11051 {
11052 /* But we still need to check our siblings. */
11053 if (code == TREE_LIST)
11054 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11055 else if (code == OMP_CLAUSE)
11056 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11057 else
11058 return NULL_TREE;
11059 }
11060
11061 if (lh)
11062 {
11063 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11064 if (result || !walk_subtrees)
11065 return result;
11066 }
11067
11068 switch (code)
11069 {
11070 case ERROR_MARK:
11071 case IDENTIFIER_NODE:
11072 case INTEGER_CST:
11073 case REAL_CST:
11074 case FIXED_CST:
11075 case VECTOR_CST:
11076 case STRING_CST:
11077 case BLOCK:
11078 case PLACEHOLDER_EXPR:
11079 case SSA_NAME:
11080 case FIELD_DECL:
11081 case RESULT_DECL:
11082 /* None of these have subtrees other than those already walked
11083 above. */
11084 break;
11085
11086 case TREE_LIST:
11087 WALK_SUBTREE (TREE_VALUE (*tp));
11088 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11089 break;
11090
11091 case TREE_VEC:
11092 {
11093 int len = TREE_VEC_LENGTH (*tp);
11094
11095 if (len == 0)
11096 break;
11097
11098 /* Walk all elements but the first. */
11099 while (--len)
11100 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11101
11102 /* Now walk the first one as a tail call. */
11103 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11104 }
11105
11106 case COMPLEX_CST:
11107 WALK_SUBTREE (TREE_REALPART (*tp));
11108 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11109
11110 case CONSTRUCTOR:
11111 {
11112 unsigned HOST_WIDE_INT idx;
11113 constructor_elt *ce;
11114
11115 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11116 idx++)
11117 WALK_SUBTREE (ce->value);
11118 }
11119 break;
11120
11121 case SAVE_EXPR:
11122 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11123
11124 case BIND_EXPR:
11125 {
11126 tree decl;
11127 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11128 {
11129 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11130 into declarations that are just mentioned, rather than
11131 declared; they don't really belong to this part of the tree.
11132 And, we can see cycles: the initializer for a declaration
11133 can refer to the declaration itself. */
11134 WALK_SUBTREE (DECL_INITIAL (decl));
11135 WALK_SUBTREE (DECL_SIZE (decl));
11136 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11137 }
11138 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11139 }
11140
11141 case STATEMENT_LIST:
11142 {
11143 tree_stmt_iterator i;
11144 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11145 WALK_SUBTREE (*tsi_stmt_ptr (i));
11146 }
11147 break;
11148
11149 case OMP_CLAUSE:
11150 switch (OMP_CLAUSE_CODE (*tp))
11151 {
11152 case OMP_CLAUSE_PRIVATE:
11153 case OMP_CLAUSE_SHARED:
11154 case OMP_CLAUSE_FIRSTPRIVATE:
11155 case OMP_CLAUSE_COPYIN:
11156 case OMP_CLAUSE_COPYPRIVATE:
11157 case OMP_CLAUSE_FINAL:
11158 case OMP_CLAUSE_IF:
11159 case OMP_CLAUSE_NUM_THREADS:
11160 case OMP_CLAUSE_SCHEDULE:
11161 case OMP_CLAUSE_UNIFORM:
11162 case OMP_CLAUSE_DEPEND:
11163 case OMP_CLAUSE_NUM_TEAMS:
11164 case OMP_CLAUSE_THREAD_LIMIT:
11165 case OMP_CLAUSE_DEVICE:
11166 case OMP_CLAUSE_DIST_SCHEDULE:
11167 case OMP_CLAUSE_SAFELEN:
11168 case OMP_CLAUSE_SIMDLEN:
11169 case OMP_CLAUSE__LOOPTEMP_:
11170 case OMP_CLAUSE__SIMDUID_:
11171 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11172 /* FALLTHRU */
11173
11174 case OMP_CLAUSE_NOWAIT:
11175 case OMP_CLAUSE_ORDERED:
11176 case OMP_CLAUSE_DEFAULT:
11177 case OMP_CLAUSE_UNTIED:
11178 case OMP_CLAUSE_MERGEABLE:
11179 case OMP_CLAUSE_PROC_BIND:
11180 case OMP_CLAUSE_INBRANCH:
11181 case OMP_CLAUSE_NOTINBRANCH:
11182 case OMP_CLAUSE_FOR:
11183 case OMP_CLAUSE_PARALLEL:
11184 case OMP_CLAUSE_SECTIONS:
11185 case OMP_CLAUSE_TASKGROUP:
11186 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11187
11188 case OMP_CLAUSE_LASTPRIVATE:
11189 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11190 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11191 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11192
11193 case OMP_CLAUSE_COLLAPSE:
11194 {
11195 int i;
11196 for (i = 0; i < 3; i++)
11197 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11198 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11199 }
11200
11201 case OMP_CLAUSE_ALIGNED:
11202 case OMP_CLAUSE_LINEAR:
11203 case OMP_CLAUSE_FROM:
11204 case OMP_CLAUSE_TO:
11205 case OMP_CLAUSE_MAP:
11206 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11207 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11208 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11209
11210 case OMP_CLAUSE_REDUCTION:
11211 {
11212 int i;
11213 for (i = 0; i < 4; i++)
11214 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11215 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11216 }
11217
11218 default:
11219 gcc_unreachable ();
11220 }
11221 break;
11222
11223 case TARGET_EXPR:
11224 {
11225 int i, len;
11226
11227 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11228 But, we only want to walk once. */
11229 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11230 for (i = 0; i < len; ++i)
11231 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11232 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11233 }
11234
11235 case DECL_EXPR:
11236 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11237 defining. We only want to walk into these fields of a type in this
11238 case and not in the general case of a mere reference to the type.
11239
11240 The criterion is as follows: if the field can be an expression, it
11241 must be walked only here. This should be in keeping with the fields
11242 that are directly gimplified in gimplify_type_sizes in order for the
11243 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11244 variable-sized types.
11245
11246 Note that DECLs get walked as part of processing the BIND_EXPR. */
11247 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11248 {
11249 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11250 if (TREE_CODE (*type_p) == ERROR_MARK)
11251 return NULL_TREE;
11252
11253 /* Call the function for the type. See if it returns anything or
11254 doesn't want us to continue. If we are to continue, walk both
11255 the normal fields and those for the declaration case. */
11256 result = (*func) (type_p, &walk_subtrees, data);
11257 if (result || !walk_subtrees)
11258 return result;
11259
11260 /* But do not walk a pointed-to type since it may itself need to
11261 be walked in the declaration case if it isn't anonymous. */
11262 if (!POINTER_TYPE_P (*type_p))
11263 {
11264 result = walk_type_fields (*type_p, func, data, pset, lh);
11265 if (result)
11266 return result;
11267 }
11268
11269 /* If this is a record type, also walk the fields. */
11270 if (RECORD_OR_UNION_TYPE_P (*type_p))
11271 {
11272 tree field;
11273
11274 for (field = TYPE_FIELDS (*type_p); field;
11275 field = DECL_CHAIN (field))
11276 {
11277 /* We'd like to look at the type of the field, but we can
11278 easily get infinite recursion. So assume it's pointed
11279 to elsewhere in the tree. Also, ignore things that
11280 aren't fields. */
11281 if (TREE_CODE (field) != FIELD_DECL)
11282 continue;
11283
11284 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11285 WALK_SUBTREE (DECL_SIZE (field));
11286 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11287 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11288 WALK_SUBTREE (DECL_QUALIFIER (field));
11289 }
11290 }
11291
11292 /* Same for scalar types. */
11293 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11294 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11295 || TREE_CODE (*type_p) == INTEGER_TYPE
11296 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11297 || TREE_CODE (*type_p) == REAL_TYPE)
11298 {
11299 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11300 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11301 }
11302
11303 WALK_SUBTREE (TYPE_SIZE (*type_p));
11304 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11305 }
11306 /* FALLTHRU */
11307
11308 default:
11309 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11310 {
11311 int i, len;
11312
11313 /* Walk over all the sub-trees of this operand. */
11314 len = TREE_OPERAND_LENGTH (*tp);
11315
11316 /* Go through the subtrees. We need to do this in forward order so
11317 that the scope of a FOR_EXPR is handled properly. */
11318 if (len)
11319 {
11320 for (i = 0; i < len - 1; ++i)
11321 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11322 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11323 }
11324 }
11325 /* If this is a type, walk the needed fields in the type. */
11326 else if (TYPE_P (*tp))
11327 return walk_type_fields (*tp, func, data, pset, lh);
11328 break;
11329 }
11330
11331 /* We didn't find what we were looking for. */
11332 return NULL_TREE;
11333
11334 #undef WALK_SUBTREE_TAIL
11335 }
11336 #undef WALK_SUBTREE
11337
11338 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11339
11340 tree
11341 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11342 walk_tree_lh lh)
11343 {
11344 tree result;
11345 struct pointer_set_t *pset;
11346
11347 pset = pointer_set_create ();
11348 result = walk_tree_1 (tp, func, data, pset, lh);
11349 pointer_set_destroy (pset);
11350 return result;
11351 }
11352
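/* Illustrative sketch (not compiled): one way a caller might use
   walk_tree_without_duplicates, which expands to walk_tree_1 with a fresh
   pointer set as above.  The names count_calls_r and count_calls are
   hypothetical and not part of GCC.  */
#if 0
/* Hypothetical walk_tree_fn callback: bump the counter in DATA for every
   CALL_EXPR visited; returning NULL_TREE keeps the walk going.  */
static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;
  return NULL_TREE;
}

/* Hypothetical driver: shared subtrees are visited only once because the
   wrapper supplies a pointer set to walk_tree_1.  */
static int
count_calls (tree body)
{
  int n = 0;
  walk_tree_without_duplicates (&body, count_calls_r, &n);
  return n;
}
#endif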
11353
11354 tree
11355 tree_block (tree t)
11356 {
11357 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11358
11359 if (IS_EXPR_CODE_CLASS (c))
11360 return LOCATION_BLOCK (t->exp.locus);
11361 gcc_unreachable ();
11362 return NULL;
11363 }
11364
11365 void
11366 tree_set_block (tree t, tree b)
11367 {
11368 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11369
11370 if (IS_EXPR_CODE_CLASS (c))
11371 {
11372 if (b)
11373 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11374 else
11375 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11376 }
11377 else
11378 gcc_unreachable ();
11379 }
11380
11381 /* Create a nameless artificial label and put it in the current
11382 function context. The label has a location of LOC. Returns the
11383 newly created label. */
11384
11385 tree
11386 create_artificial_label (location_t loc)
11387 {
11388 tree lab = build_decl (loc,
11389 LABEL_DECL, NULL_TREE, void_type_node);
11390
11391 DECL_ARTIFICIAL (lab) = 1;
11392 DECL_IGNORED_P (lab) = 1;
11393 DECL_CONTEXT (lab) = current_function_decl;
11394 return lab;
11395 }
11396
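/* Illustrative sketch (not compiled): a typical use of
   create_artificial_label when lowering generated control flow.  The helper
   name and its outputs are hypothetical, not part of GCC.  */
#if 0
static void
emit_label_and_goto (location_t loc, tree *label_expr, tree *goto_expr)
{
  /* Make a fresh, nameless label owned by the current function ...  */
  tree lab = create_artificial_label (loc);
  /* ... and wrap it into the statements that define and jump to it.  */
  *label_expr = build1_loc (loc, LABEL_EXPR, void_type_node, lab);
  *goto_expr = build1_loc (loc, GOTO_EXPR, void_type_node, lab);
}
#endif
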
11397 /* Given a tree, try to return a useful variable name that we can use
11398 to prefix a temporary that is being assigned the value of the tree.
11399 I.e. given <temp> = &A, return A. */
11400
11401 const char *
11402 get_name (tree t)
11403 {
11404 tree stripped_decl;
11405
11406 stripped_decl = t;
11407 STRIP_NOPS (stripped_decl);
11408 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11409 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11410 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11411 {
11412 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11413 if (!name)
11414 return NULL;
11415 return IDENTIFIER_POINTER (name);
11416 }
11417 else
11418 {
11419 switch (TREE_CODE (stripped_decl))
11420 {
11421 case ADDR_EXPR:
11422 return get_name (TREE_OPERAND (stripped_decl, 0));
11423 default:
11424 return NULL;
11425 }
11426 }
11427 }
11428
11429 /* Return true if function type FNTYPE has a variable argument list. */
11430
11431 bool
11432 stdarg_p (const_tree fntype)
11433 {
11434 function_args_iterator args_iter;
11435 tree n = NULL_TREE, t;
11436
11437 if (!fntype)
11438 return false;
11439
11440 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11441 {
11442 n = t;
11443 }
11444
11445 return n != NULL_TREE && n != void_type_node;
11446 }
11447
11448 /* Return true if function type FNTYPE has a prototype. */
11449
11450 bool
11451 prototype_p (tree fntype)
11452 {
11453 tree t;
11454
11455 gcc_assert (fntype != NULL_TREE);
11456
11457 t = TYPE_ARG_TYPES (fntype);
11458 return (t != NULL_TREE);
11459 }
11460
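/* Illustrative sketch (not compiled): combining stdarg_p and prototype_p to
   decide whether a call may legitimately pass more arguments than the
   declared parameters.  The helper name is hypothetical.  */
#if 0
static bool
may_pass_extra_args_p (tree fndecl)
{
  tree fntype = TREE_TYPE (fndecl);
  /* Variadic functions and unprototyped functions both accept extras.  */
  return stdarg_p (fntype) || !prototype_p (fntype);
}
#endif
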
11461 /* If BLOCK is inlined from an __attribute__((__artificial__))
11462 routine, return a pointer to the location from which it has been
11463 called. */
11464 location_t *
11465 block_nonartificial_location (tree block)
11466 {
11467 location_t *ret = NULL;
11468
11469 while (block && TREE_CODE (block) == BLOCK
11470 && BLOCK_ABSTRACT_ORIGIN (block))
11471 {
11472 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11473
11474 while (TREE_CODE (ao) == BLOCK
11475 && BLOCK_ABSTRACT_ORIGIN (ao)
11476 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11477 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11478
11479 if (TREE_CODE (ao) == FUNCTION_DECL)
11480 {
11481 /* If AO is an artificial inline, point RET to the
11482 call site locus at which it has been inlined and continue
11483 the loop, in case AO's caller is also an artificial
11484 inline. */
11485 if (DECL_DECLARED_INLINE_P (ao)
11486 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11487 ret = &BLOCK_SOURCE_LOCATION (block);
11488 else
11489 break;
11490 }
11491 else if (TREE_CODE (ao) != BLOCK)
11492 break;
11493
11494 block = BLOCK_SUPERCONTEXT (block);
11495 }
11496 return ret;
11497 }
11498
11499
11500 /* If EXP is inlined from an __attribute__((__artificial__))
11501 function, return the location of the original call expression. */
11502
11503 location_t
11504 tree_nonartificial_location (tree exp)
11505 {
11506 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11507
11508 if (loc)
11509 return *loc;
11510 else
11511 return EXPR_LOCATION (exp);
11512 }
11513
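/* Illustrative sketch (not compiled): diagnostics that want to point at user
   code rather than at an __attribute__((artificial)) inline wrapper can
   route their location through tree_nonartificial_location.  The helper
   name is hypothetical.  */
#if 0
static void
warn_at_user_location (tree exp, const char *msg)
{
  warning_at (tree_nonartificial_location (exp), 0, "%s", msg);
}
#endif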
11514
11515 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11516 nodes. */
11517
11518 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11519
11520 static hashval_t
11521 cl_option_hash_hash (const void *x)
11522 {
11523 const_tree const t = (const_tree) x;
11524 const char *p;
11525 size_t i;
11526 size_t len = 0;
11527 hashval_t hash = 0;
11528
11529 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11530 {
11531 p = (const char *)TREE_OPTIMIZATION (t);
11532 len = sizeof (struct cl_optimization);
11533 }
11534
11535 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11536 {
11537 p = (const char *)TREE_TARGET_OPTION (t);
11538 len = sizeof (struct cl_target_option);
11539 }
11540
11541 else
11542 gcc_unreachable ();
11543
11544 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11545 something else. */
11546 for (i = 0; i < len; i++)
11547 if (p[i])
11548 hash = (hash << 4) ^ ((i << 2) | p[i]);
11549
11550 return hash;
11551 }
11552
11553 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11554 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11555 the same kind. */
11556
11557 static int
11558 cl_option_hash_eq (const void *x, const void *y)
11559 {
11560 const_tree const xt = (const_tree) x;
11561 const_tree const yt = (const_tree) y;
11562 const char *xp;
11563 const char *yp;
11564 size_t len;
11565
11566 if (TREE_CODE (xt) != TREE_CODE (yt))
11567 return 0;
11568
11569 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11570 {
11571 xp = (const char *)TREE_OPTIMIZATION (xt);
11572 yp = (const char *)TREE_OPTIMIZATION (yt);
11573 len = sizeof (struct cl_optimization);
11574 }
11575
11576 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11577 {
11578 xp = (const char *)TREE_TARGET_OPTION (xt);
11579 yp = (const char *)TREE_TARGET_OPTION (yt);
11580 len = sizeof (struct cl_target_option);
11581 }
11582
11583 else
11584 gcc_unreachable ();
11585
11586 return (memcmp (xp, yp, len) == 0);
11587 }
11588
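/* Illustrative sketch (not compiled): how the two static callbacks above
   would be registered in a GC-allocated hash table of option nodes.  The
   variable and function names here are hypothetical.  */
#if 0
static htab_t example_option_table;

static void
init_example_option_table (void)
{
  /* Nodes hash on the raw bytes of their saved option structures and
     compare equal when those bytes match.  */
  example_option_table
    = htab_create_ggc (64, cl_option_hash_hash, cl_option_hash_eq, NULL);
}
#endif
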
11589 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11590
11591 tree
11592 build_optimization_node (struct gcc_options *opts)
11593 {
11594 tree t;
11595 void **slot;
11596
11597 /* Use the cache of optimization nodes. */
11598
11599 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11600 opts);
11601
11602 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11603 t = (tree) *slot;
11604 if (!t)
11605 {
11606 /* Insert this one into the hash table. */
11607 t = cl_optimization_node;
11608 *slot = t;
11609
11610 /* Make a new node for next time round. */
11611 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11612 }
11613
11614 return t;
11615 }
11616
11617 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11618
11619 tree
11620 build_target_option_node (struct gcc_options *opts)
11621 {
11622 tree t;
11623 void **slot;
11624
11625 /* Use the cache of target option nodes. */
11626
11627 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11628 opts);
11629
11630 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11631 t = (tree) *slot;
11632 if (!t)
11633 {
11634 /* Insert this one into the hash table. */
11635 t = cl_target_option_node;
11636 *slot = t;
11637
11638 /* Make a new node for next time round. */
11639 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11640 }
11641
11642 return t;
11643 }
11644
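/* Illustrative sketch (not compiled): attribute handlers typically snapshot
   the current command-line state through these builders and attach the
   interned nodes to a function declaration.  The helper name is
   hypothetical.  */
#if 0
static void
record_current_option_state (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
}
#endif
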
11645 /* Determine the "ultimate origin" of a block. The block may be an inlined
11646 instance of an inlined instance of a block which is local to an inline
11647 function, so we have to trace all of the way back through the origin chain
11648 to find out what sort of node actually served as the original seed for the
11649 given block. */
11650
11651 tree
11652 block_ultimate_origin (const_tree block)
11653 {
11654 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11655
11656 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11657 nodes in the function to point to themselves; ignore that if
11658 we're trying to output the abstract instance of this function. */
11659 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11660 return NULL_TREE;
11661
11662 if (immediate_origin == NULL_TREE)
11663 return NULL_TREE;
11664 else
11665 {
11666 tree ret_val;
11667 tree lookahead = immediate_origin;
11668
11669 do
11670 {
11671 ret_val = lookahead;
11672 lookahead = (TREE_CODE (ret_val) == BLOCK
11673 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11674 }
11675 while (lookahead != NULL && lookahead != ret_val);
11676
11677 /* The block's abstract origin chain may not be the *ultimate* origin of
11678 the block. It could lead to a DECL that has an abstract origin set.
11679 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11680 will give us if it has one). Note that DECL's abstract origins are
11681 supposed to be the most distant ancestor (or so decl_ultimate_origin
11682 claims), so we don't need to loop following the DECL origins. */
11683 if (DECL_P (ret_val))
11684 return DECL_ORIGIN (ret_val);
11685
11686 return ret_val;
11687 }
11688 }
11689
11690 /* Return true if T1 and T2 are equivalent lists. */
11691
11692 bool
11693 list_equal_p (const_tree t1, const_tree t2)
11694 {
11695 for (; t1 && t2; t1 = TREE_CHAIN (t1) , t2 = TREE_CHAIN (t2))
11696 if (TREE_VALUE (t1) != TREE_VALUE (t2))
11697 return false;
11698 return !t1 && !t2;
11699 }
11700
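/* Illustrative sketch (not compiled): list_equal_p compares TREE_VALUEs
   pairwise, so it can check that two function types list the same argument
   types.  The helper name is hypothetical.  */
#if 0
static bool
same_argument_lists_p (tree fntype1, tree fntype2)
{
  return list_equal_p (TYPE_ARG_TYPES (fntype1), TYPE_ARG_TYPES (fntype2));
}
#endif
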
11701 /* Return true iff conversion in EXP generates no instruction. Mark
11702 it inline so that we fully inline into the stripping functions even
11703 though we have two uses of this function. */
11704
11705 static inline bool
11706 tree_nop_conversion (const_tree exp)
11707 {
11708 tree outer_type, inner_type;
11709
11710 if (!CONVERT_EXPR_P (exp)
11711 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11712 return false;
11713 if (TREE_OPERAND (exp, 0) == error_mark_node)
11714 return false;
11715
11716 outer_type = TREE_TYPE (exp);
11717 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11718
11719 if (!inner_type)
11720 return false;
11721
11722 /* Use precision rather than machine mode when we can, which gives
11723 the correct answer even for submode (bit-field) types. */
11724 if ((INTEGRAL_TYPE_P (outer_type)
11725 || POINTER_TYPE_P (outer_type)
11726 || TREE_CODE (outer_type) == OFFSET_TYPE)
11727 && (INTEGRAL_TYPE_P (inner_type)
11728 || POINTER_TYPE_P (inner_type)
11729 || TREE_CODE (inner_type) == OFFSET_TYPE))
11730 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11731
11732 /* Otherwise fall back on comparing machine modes (e.g. for
11733 aggregate types, floats). */
11734 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11735 }
11736
11737 /* Return true iff conversion in EXP generates no instruction. Don't
11738 consider conversions changing the signedness. */
11739
11740 static bool
11741 tree_sign_nop_conversion (const_tree exp)
11742 {
11743 tree outer_type, inner_type;
11744
11745 if (!tree_nop_conversion (exp))
11746 return false;
11747
11748 outer_type = TREE_TYPE (exp);
11749 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11750
11751 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11752 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11753 }
11754
11755 /* Strip conversions from EXP according to tree_nop_conversion and
11756 return the resulting expression. */
11757
11758 tree
11759 tree_strip_nop_conversions (tree exp)
11760 {
11761 while (tree_nop_conversion (exp))
11762 exp = TREE_OPERAND (exp, 0);
11763 return exp;
11764 }
11765
11766 /* Strip conversions from EXP according to tree_sign_nop_conversion
11767 and return the resulting expression. */
11768
11769 tree
11770 tree_strip_sign_nop_conversions (tree exp)
11771 {
11772 while (tree_sign_nop_conversion (exp))
11773 exp = TREE_OPERAND (exp, 0);
11774 return exp;
11775 }
11776
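/* Illustrative sketch (not compiled): comparing two operands while ignoring
   conversions that generate no code, e.g. casts between pointer types or
   between integer types of the same precision.  The helper name is
   hypothetical.  */
#if 0
static bool
operands_equal_modulo_nops_p (tree a, tree b)
{
  return operand_equal_p (tree_strip_nop_conversions (a),
			  tree_strip_nop_conversions (b), 0);
}
#endif
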
11777 /* Avoid any floating point extensions from EXP. */
11778 tree
11779 strip_float_extensions (tree exp)
11780 {
11781 tree sub, expt, subt;
11782
11783 /* For a floating point constant, look up the narrowest type that can hold
11784 it exactly and handle it like (type)(narrowest_type)constant.
11785 This way we can optimize, for instance, a=a*2.0 where "a" is float
11786 but 2.0 is a double constant. */
11787 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11788 {
11789 REAL_VALUE_TYPE orig;
11790 tree type = NULL;
11791
11792 orig = TREE_REAL_CST (exp);
11793 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11794 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11795 type = float_type_node;
11796 else if (TYPE_PRECISION (TREE_TYPE (exp))
11797 > TYPE_PRECISION (double_type_node)
11798 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11799 type = double_type_node;
11800 if (type)
11801 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11802 }
11803
11804 if (!CONVERT_EXPR_P (exp))
11805 return exp;
11806
11807 sub = TREE_OPERAND (exp, 0);
11808 subt = TREE_TYPE (sub);
11809 expt = TREE_TYPE (exp);
11810
11811 if (!FLOAT_TYPE_P (subt))
11812 return exp;
11813
11814 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11815 return exp;
11816
11817 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11818 return exp;
11819
11820 return strip_float_extensions (sub);
11821 }
11822
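/* Illustrative sketch (not compiled): for a widened multiplication such as
   (double) f * 2.0 with F of type float, both operands strip back to float
   width, so the operation could be performed in the narrower type.  The
   helper name is hypothetical.  */
#if 0
static bool
can_narrow_binary_op_p (tree op0, tree op1, tree narrow_type)
{
  tree n0 = strip_float_extensions (op0);
  tree n1 = strip_float_extensions (op1);
  return (TYPE_PRECISION (TREE_TYPE (n0)) <= TYPE_PRECISION (narrow_type)
	  && TYPE_PRECISION (TREE_TYPE (n1)) <= TYPE_PRECISION (narrow_type));
}
#endif
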
11823 /* Strip out all handled components that produce invariant
11824 offsets. */
11825
11826 const_tree
11827 strip_invariant_refs (const_tree op)
11828 {
11829 while (handled_component_p (op))
11830 {
11831 switch (TREE_CODE (op))
11832 {
11833 case ARRAY_REF:
11834 case ARRAY_RANGE_REF:
11835 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11836 || TREE_OPERAND (op, 2) != NULL_TREE
11837 || TREE_OPERAND (op, 3) != NULL_TREE)
11838 return NULL;
11839 break;
11840
11841 case COMPONENT_REF:
11842 if (TREE_OPERAND (op, 2) != NULL_TREE)
11843 return NULL;
11844 break;
11845
11846 default:;
11847 }
11848 op = TREE_OPERAND (op, 0);
11849 }
11850
11851 return op;
11852 }
11853
11854 static GTY(()) tree gcc_eh_personality_decl;
11855
11856 /* Return the GCC personality function decl. */
11857
11858 tree
11859 lhd_gcc_personality (void)
11860 {
11861 if (!gcc_eh_personality_decl)
11862 gcc_eh_personality_decl = build_personality_function ("gcc");
11863 return gcc_eh_personality_decl;
11864 }
11865
11866 /* For languages with One Definition Rule, work out if
11867 trees are actually the same even if the tree representation
11868 differs. This handles only decls appearing in TYPE_NAME
11869 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11870 RECORD_TYPE and IDENTIFIER_NODE. */
11871
11872 static bool
11873 same_for_odr (tree t1, tree t2)
11874 {
11875 if (t1 == t2)
11876 return true;
11877 if (!t1 || !t2)
11878 return false;
11879 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11880 if (TREE_CODE (t1) == IDENTIFIER_NODE
11881 && TREE_CODE (t2) == TYPE_DECL
11882 && DECL_FILE_SCOPE_P (t1))
11883 {
11884 t2 = DECL_NAME (t2);
11885 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11886 }
11887 if (TREE_CODE (t2) == IDENTIFIER_NODE
11888 && TREE_CODE (t1) == TYPE_DECL
11889 && DECL_FILE_SCOPE_P (t2))
11890 {
11891 t1 = DECL_NAME (t1);
11892 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11893 }
11894 if (TREE_CODE (t1) != TREE_CODE (t2))
11895 return false;
11896 if (TYPE_P (t1))
11897 return types_same_for_odr (t1, t2);
11898 if (DECL_P (t1))
11899 return decls_same_for_odr (t1, t2);
11900 return false;
11901 }
11902
11903 /* For languages with One Definition Rule, work out if
11904 decls are actually the same even if the tree representation
11905 differs. This handles only decls appearing in TYPE_NAME
11906 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11907 RECORD_TYPE and IDENTIFIER_NODE. */
11908
11909 static bool
11910 decls_same_for_odr (tree decl1, tree decl2)
11911 {
11912 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11913 && DECL_ORIGINAL_TYPE (decl1))
11914 decl1 = DECL_ORIGINAL_TYPE (decl1);
11915 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11916 && DECL_ORIGINAL_TYPE (decl2))
11917 decl2 = DECL_ORIGINAL_TYPE (decl2);
11918 if (decl1 == decl2)
11919 return true;
11920 if (!decl1 || !decl2)
11921 return false;
11922 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11923 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11924 return false;
11925 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11926 return true;
11927 if (TREE_CODE (decl1) != NAMESPACE_DECL
11928 && TREE_CODE (decl1) != TYPE_DECL)
11929 return false;
11930 if (!DECL_NAME (decl1))
11931 return false;
11932 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11933 gcc_checking_assert (!DECL_NAME (decl2)
11934 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11935 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11936 return false;
11937 return same_for_odr (DECL_CONTEXT (decl1),
11938 DECL_CONTEXT (decl2));
11939 }
11940
11941 /* For languages with One Definition Rule, work out if
11942 types are the same even if the tree representation differs.
11943 This is non-trivial for LTO, where minor differences in
11944 the type representation may have prevented type merging
11945 from merging two copies of an otherwise equivalent type. */
11946
11947 bool
11948 types_same_for_odr (tree type1, tree type2)
11949 {
11950 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11951 type1 = TYPE_MAIN_VARIANT (type1);
11952 type2 = TYPE_MAIN_VARIANT (type2);
11953 if (type1 == type2)
11954 return true;
11955
11956 #ifndef ENABLE_CHECKING
11957 if (!in_lto_p)
11958 return false;
11959 #endif
11960
11961 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11962 on the corresponding TYPE_STUB_DECL. */
11963 if (type_in_anonymous_namespace_p (type1)
11964 || type_in_anonymous_namespace_p (type2))
11965 return false;
11966 /* When assembler name of virtual table is available, it is
11967 easy to compare types for equivalence. */
11968 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11969 && BINFO_VTABLE (TYPE_BINFO (type1))
11970 && BINFO_VTABLE (TYPE_BINFO (type2)))
11971 {
11972 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11973 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11974
11975 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11976 {
11977 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11978 || !operand_equal_p (TREE_OPERAND (v1, 1),
11979 TREE_OPERAND (v2, 1), 0))
11980 return false;
11981 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11982 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11983 }
11984 v1 = DECL_ASSEMBLER_NAME (v1);
11985 v2 = DECL_ASSEMBLER_NAME (v2);
11986 return (v1 == v2);
11987 }
11988
11989 /* FIXME: the code comparing type names considers all instantiations of the
11990 same template to have the same name. This is because we have no access
11991 to template parameters. For types with no virtual method tables
11992 we can thus return false positives. At the moment we do not need
11993 to compare types in scenarios other than devirtualization. */
11994
11995 /* If the types are not structurally the same, do not bother to continue.
11996 A match in the remainder of the code would mean an ODR violation. */
11997 if (!types_compatible_p (type1, type2))
11998 return false;
11999 if (!TYPE_NAME (type1))
12000 return false;
12001 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
12002 return false;
12003 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
12004 return false;
12005 /* When not in LTO the MAIN_VARIANT check should be the same. */
12006 gcc_assert (in_lto_p);
12007
12008 return true;
12009 }
12010
12011 /* TARGET is the call target of a GIMPLE call statement
12012 (obtained by gimple_call_fn). Return true if it is an
12013 OBJ_TYPE_REF representing a virtual call to a C++ method.
12014 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12015 through a cast, where the middle-end devirtualization machinery
12016 can't apply.) */
12017
12018 bool
12019 virtual_method_call_p (tree target)
12020 {
12021 if (TREE_CODE (target) != OBJ_TYPE_REF)
12022 return false;
12023 target = TREE_TYPE (target);
12024 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
12025 target = TREE_TYPE (target);
12026 if (TREE_CODE (target) == FUNCTION_TYPE)
12027 return false;
12028 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
12029 return true;
12030 }
12031
12032 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12033
12034 tree
12035 obj_type_ref_class (tree ref)
12036 {
12037 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12038 ref = TREE_TYPE (ref);
12039 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12040 ref = TREE_TYPE (ref);
12041 /* We look for the type that THIS points to. ObjC also builds
12042 OBJ_TYPE_REF for non-method calls; their first parameter
12043 ID, however, also corresponds to the class type. */
12044 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12045 || TREE_CODE (ref) == FUNCTION_TYPE);
12046 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12047 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12048 return TREE_TYPE (ref);
12049 }
12050
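/* Illustrative sketch (not compiled): devirtualization code can recover the
   static class of *this from a virtual call's OBJ_TYPE_REF using the two
   functions above.  The helper name is hypothetical.  */
#if 0
static tree
static_class_of_call (gimple call)
{
  tree fn = gimple_call_fn (call);
  if (fn && virtual_method_call_p (fn))
    return obj_type_ref_class (fn);
  return NULL_TREE;
}
#endif
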
12051 /* Return true if T is in an anonymous namespace. */
12052
12053 bool
12054 type_in_anonymous_namespace_p (tree t)
12055 {
12056 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
12057 }
12058
12059 /* Try to find a base info of BINFO that would have its field decl at offset
12060 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12061 found, return it, otherwise return NULL_TREE. */
12062
12063 tree
12064 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12065 {
12066 tree type = BINFO_TYPE (binfo);
12067
12068 while (true)
12069 {
12070 HOST_WIDE_INT pos, size;
12071 tree fld;
12072 int i;
12073
12074 if (types_same_for_odr (type, expected_type))
12075 return binfo;
12076 if (offset < 0)
12077 return NULL_TREE;
12078
12079 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12080 {
12081 if (TREE_CODE (fld) != FIELD_DECL)
12082 continue;
12083
12084 pos = int_bit_position (fld);
12085 size = tree_to_uhwi (DECL_SIZE (fld));
12086 if (pos <= offset && (pos + size) > offset)
12087 break;
12088 }
12089 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12090 return NULL_TREE;
12091
12092 if (!DECL_ARTIFICIAL (fld))
12093 {
12094 binfo = TYPE_BINFO (TREE_TYPE (fld));
12095 if (!binfo)
12096 return NULL_TREE;
12097 }
12098 /* Offset 0 indicates the primary base, whose vtable contents are
12099 represented in the binfo for the derived class. */
12100 else if (offset != 0)
12101 {
12102 tree base_binfo, found_binfo = NULL_TREE;
12103 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12104 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12105 {
12106 found_binfo = base_binfo;
12107 break;
12108 }
12109 if (!found_binfo)
12110 return NULL_TREE;
12111 binfo = found_binfo;
12112 }
12113
12114 type = TREE_TYPE (fld);
12115 offset -= pos;
12116 }
12117 }
12118
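/* Illustrative sketch (not compiled): callers folding an OBJ_TYPE_REF need
   the BINFO of the base sub-object at a known bit offset inside an outer
   type.  The helper name is hypothetical.  */
#if 0
static tree
base_binfo_at (tree outer_type, HOST_WIDE_INT bit_offset, tree expected_type)
{
  if (!TYPE_BINFO (outer_type))
    return NULL_TREE;
  return get_binfo_at_offset (TYPE_BINFO (outer_type), bit_offset,
			      expected_type);
}
#endif
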
12119 /* Returns true if X is a typedef decl. */
12120
12121 bool
12122 is_typedef_decl (tree x)
12123 {
12124 return (x && TREE_CODE (x) == TYPE_DECL
12125 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12126 }
12127
12128 /* Returns true iff TYPE is a type variant created for a typedef. */
12129
12130 bool
12131 typedef_variant_p (tree type)
12132 {
12133 return is_typedef_decl (TYPE_NAME (type));
12134 }
12135
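/* Illustrative sketch (not compiled): a diagnostic that wants to talk about
   the type underlying a typedef can peel one typedef layer this way.  The
   helper name is hypothetical.  */
#if 0
static tree
peel_typedef (tree type)
{
  if (typedef_variant_p (type))
    return DECL_ORIGINAL_TYPE (TYPE_NAME (type));
  return type;
}
#endif
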
12136 /* Warn about a use of an identifier which was marked deprecated. */
12137 void
12138 warn_deprecated_use (tree node, tree attr)
12139 {
12140 const char *msg;
12141
12142 if (node == 0 || !warn_deprecated_decl)
12143 return;
12144
12145 if (!attr)
12146 {
12147 if (DECL_P (node))
12148 attr = DECL_ATTRIBUTES (node);
12149 else if (TYPE_P (node))
12150 {
12151 tree decl = TYPE_STUB_DECL (node);
12152 if (decl)
12153 attr = lookup_attribute ("deprecated",
12154 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12155 }
12156 }
12157
12158 if (attr)
12159 attr = lookup_attribute ("deprecated", attr);
12160
12161 if (attr)
12162 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12163 else
12164 msg = NULL;
12165
12166 if (DECL_P (node))
12167 {
12168 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12169 if (msg)
12170 warning (OPT_Wdeprecated_declarations,
12171 "%qD is deprecated (declared at %r%s:%d%R): %s",
12172 node, "locus", xloc.file, xloc.line, msg);
12173 else
12174 warning (OPT_Wdeprecated_declarations,
12175 "%qD is deprecated (declared at %r%s:%d%R)",
12176 node, "locus", xloc.file, xloc.line);
12177 }
12178 else if (TYPE_P (node))
12179 {
12180 tree what = NULL_TREE;
12181 tree decl = TYPE_STUB_DECL (node);
12182
12183 if (TYPE_NAME (node))
12184 {
12185 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12186 what = TYPE_NAME (node);
12187 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12188 && DECL_NAME (TYPE_NAME (node)))
12189 what = DECL_NAME (TYPE_NAME (node));
12190 }
12191
12192 if (decl)
12193 {
12194 expanded_location xloc
12195 = expand_location (DECL_SOURCE_LOCATION (decl));
12196 if (what)
12197 {
12198 if (msg)
12199 warning (OPT_Wdeprecated_declarations,
12200 "%qE is deprecated (declared at %r%s:%d%R): %s",
12201 what, "locus", xloc.file, xloc.line, msg);
12202 else
12203 warning (OPT_Wdeprecated_declarations,
12204 "%qE is deprecated (declared at %r%s:%d%R)",
12205 what, "locus", xloc.file, xloc.line);
12206 }
12207 else
12208 {
12209 if (msg)
12210 warning (OPT_Wdeprecated_declarations,
12211 "type is deprecated (declared at %r%s:%d%R): %s",
12212 "locus", xloc.file, xloc.line, msg);
12213 else
12214 warning (OPT_Wdeprecated_declarations,
12215 "type is deprecated (declared at %r%s:%d%R)",
12216 "locus", xloc.file, xloc.line);
12217 }
12218 }
12219 else
12220 {
12221 if (what)
12222 {
12223 if (msg)
12224 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12225 what, msg);
12226 else
12227 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12228 }
12229 else
12230 {
12231 if (msg)
12232 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12233 msg);
12234 else
12235 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12236 }
12237 }
12238 }
12239 }
12240
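/* Illustrative sketch (not compiled): front ends call warn_deprecated_use
   when a reference to NODE is seen and NODE is marked deprecated; passing
   NULL_TREE as ATTR lets the function look the attribute up itself.  The
   helper name is hypothetical.  */
#if 0
static void
maybe_warn_deprecated (tree node)
{
  if (TREE_DEPRECATED (node))
    warn_deprecated_use (node, NULL_TREE);
}
#endif
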
12241 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12242 somewhere in it. */
12243
12244 bool
12245 contains_bitfld_component_ref_p (const_tree ref)
12246 {
12247 while (handled_component_p (ref))
12248 {
12249 if (TREE_CODE (ref) == COMPONENT_REF
12250 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12251 return true;
12252 ref = TREE_OPERAND (ref, 0);
12253 }
12254
12255 return false;
12256 }
12257
12258 /* Try to determine whether a TRY_CATCH expression can fall through.
12259 This is a subroutine of block_may_fallthru. */
12260
12261 static bool
12262 try_catch_may_fallthru (const_tree stmt)
12263 {
12264 tree_stmt_iterator i;
12265
12266 /* If the TRY block can fall through, the whole TRY_CATCH can
12267 fall through. */
12268 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12269 return true;
12270
12271 i = tsi_start (TREE_OPERAND (stmt, 1));
12272 switch (TREE_CODE (tsi_stmt (i)))
12273 {
12274 case CATCH_EXPR:
12275 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12276 catch expression and a body. The whole TRY_CATCH may fall
12277 through iff any of the catch bodies falls through. */
12278 for (; !tsi_end_p (i); tsi_next (&i))
12279 {
12280 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12281 return true;
12282 }
12283 return false;
12284
12285 case EH_FILTER_EXPR:
12286 /* The exception filter expression only matters if there is an
12287 exception. If the exception does not match EH_FILTER_TYPES,
12288 we will execute EH_FILTER_FAILURE, and we will fall through
12289 if that falls through. If the exception does match
12290 EH_FILTER_TYPES, the stack unwinder will continue up the
12291 stack, so we will not fall through. We don't know whether we
12292 will throw an exception which matches EH_FILTER_TYPES or not,
12293 so we just ignore EH_FILTER_TYPES and assume that we might
12294 throw an exception which doesn't match. */
12295 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12296
12297 default:
12298 /* This case represents statements to be executed when an
12299 exception occurs. Those statements are implicitly followed
12300 by a RESX statement to resume execution after the exception.
12301 So in this case the TRY_CATCH never falls through. */
12302 return false;
12303 }
12304 }
12305
12306 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12307 need not be 100% accurate; simply be conservative and return true if we
12308 don't know. This is used only to avoid stupidly generating extra code.
12309 If we're wrong, we'll just delete the extra code later. */
12310
12311 bool
12312 block_may_fallthru (const_tree block)
12313 {
12314 /* This CONST_CAST is okay because expr_last returns its argument
12315 unmodified and we assign it to a const_tree. */
12316 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12317
12318 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12319 {
12320 case GOTO_EXPR:
12321 case RETURN_EXPR:
12322 /* Easy cases. If the last statement of the block implies
12323 control transfer, then we can't fall through. */
12324 return false;
12325
12326 case SWITCH_EXPR:
12327 /* If SWITCH_LABELS is set, this is lowered, and represents a
12328 branch to a selected label and hence cannot fall through.
12329 Otherwise SWITCH_BODY is set, and the switch can fall
12330 through. */
12331 return SWITCH_LABELS (stmt) == NULL_TREE;
12332
12333 case COND_EXPR:
12334 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12335 return true;
12336 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12337
12338 case BIND_EXPR:
12339 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12340
12341 case TRY_CATCH_EXPR:
12342 return try_catch_may_fallthru (stmt);
12343
12344 case TRY_FINALLY_EXPR:
12345 /* The finally clause is always executed after the try clause,
12346 so if it does not fall through, then the try-finally will not
12347 fall through. Otherwise, if the try clause does not fall
12348 through, then when the finally clause falls through it will
12349 resume execution wherever the try clause was going. So the
12350 whole try-finally will only fall through if both the try
12351 clause and the finally clause fall through. */
12352 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12353 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12354
12355 case MODIFY_EXPR:
12356 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12357 stmt = TREE_OPERAND (stmt, 1);
12358 else
12359 return true;
12360 /* FALLTHRU */
12361
12362 case CALL_EXPR:
12363 /* Functions that do not return do not fall through. */
12364 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12365
12366 case CLEANUP_POINT_EXPR:
12367 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12368
12369 case TARGET_EXPR:
12370 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12371
12372 case ERROR_MARK:
12373 return true;
12374
12375 default:
12376 return lang_hooks.block_may_fallthru (stmt);
12377 }
12378 }
12379
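/* Illustrative sketch (not compiled): a front end might use
   block_may_fallthru to decide whether a function body can reach its closing
   brace, e.g. before adding an implicit return.  The helper name is
   hypothetical.  */
#if 0
static bool
body_may_reach_end_p (tree fndecl)
{
  return block_may_fallthru (DECL_SAVED_TREE (fndecl));
}
#endif
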
12380 /* True if we are using EH to handle cleanups. */
12381 static bool using_eh_for_cleanups_flag = false;
12382
12383 /* This routine is called from front ends to indicate eh should be used for
12384 cleanups. */
12385 void
12386 using_eh_for_cleanups (void)
12387 {
12388 using_eh_for_cleanups_flag = true;
12389 }
12390
12391 /* Query whether EH is used for cleanups. */
12392 bool
12393 using_eh_for_cleanups_p (void)
12394 {
12395 return using_eh_for_cleanups_flag;
12396 }
12397
12398 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12399 const char *
12400 get_tree_code_name (enum tree_code code)
12401 {
12402 const char *invalid = "<invalid tree code>";
12403
12404 if (code >= MAX_TREE_CODES)
12405 return invalid;
12406
12407 return tree_code_name[code];
12408 }
12409
12410 #include "gt-tree.h"