1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "tm_p.h"
37 #include "function.h"
38 #include "obstack.h"
39 #include "toplev.h" /* get_random_seed */
40 #include "ggc.h"
41 #include "hashtab.h"
42 #include "filenames.h"
43 #include "output.h"
44 #include "target.h"
45 #include "common/common-target.h"
46 #include "langhooks.h"
47 #include "tree-inline.h"
48 #include "tree-iterator.h"
49 #include "basic-block.h"
50 #include "bitmap.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimplify.h"
54 #include "gimple-ssa.h"
55 #include "cgraph.h"
56 #include "tree-phinodes.h"
57 #include "tree-ssanames.h"
58 #include "tree-dfa.h"
59 #include "params.h"
60 #include "pointer-set.h"
61 #include "tree-pass.h"
62 #include "langhooks-def.h"
63 #include "diagnostic.h"
64 #include "tree-diagnostic.h"
65 #include "tree-pretty-print.h"
66 #include "except.h"
67 #include "debug.h"
68 #include "intl.h"
69 #include "wide-int.h"
70
71 /* Tree code classes. */
72
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
75
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
78 };
79
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
82
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
86
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
89
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
92 };
93
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
96
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
101
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
104 };
105
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
108
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
111
112 const char *const tree_code_class_strings[] =
113 {
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
125 };
126
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
129
130 /* Statistics-gathering stuff. */
131
132 static int tree_code_counts[MAX_TREE_CODES];
133 int tree_node_counts[(int) all_kinds];
134 int tree_node_sizes[(int) all_kinds];
135
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
154 };
155
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) int next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
163
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
166
167 struct GTY(()) type_hash {
168 unsigned long hash;
169 tree type;
170 };
171
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
174
175 /* Now here is the hash table. When recording a type, it is added to
176 the slot whose index is the hash code. Note that the hash table is
177 used for several kinds of types (function types, array types and
178 array index range types, for now). While all these live in the
179 same table, they are completely independent, and the hash code is
180 computed differently for each of these. */
181
182 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
183 htab_t type_hash_table;
184
185 /* Hash table and temporary node for larger integer const values. */
186 static GTY (()) tree int_cst_node;
187 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
188 htab_t int_cst_hash_table;
189
190 /* Hash table for optimization flags and target option flags. Use the same
191 hash table for both sets of options. Nodes for building the current
192 optimization and target option nodes. The assumption is most of the time
193 the options created will already be in the hash table, so we avoid
194 allocating and freeing up a node repeatedly. */
195 static GTY (()) tree cl_optimization_node;
196 static GTY (()) tree cl_target_option_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t cl_option_hash_table;
199
200 /* General tree->tree mapping structure for use in hash tables. */
201
202
203 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
204 htab_t debug_expr_for_decl;
205
206 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
207 htab_t value_expr_for_decl;
208
209 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
210 htab_t debug_args_for_decl;
211
212 static GTY ((if_marked ("tree_priority_map_marked_p"),
213 param_is (struct tree_priority_map)))
214 htab_t init_priority_for_decl;
215
216 static void set_type_quals (tree, int);
217 static int type_hash_eq (const void *, const void *);
218 static hashval_t type_hash_hash (const void *);
219 static hashval_t int_cst_hash_hash (const void *);
220 static int int_cst_hash_eq (const void *, const void *);
221 static hashval_t cl_option_hash_hash (const void *);
222 static int cl_option_hash_eq (const void *, const void *);
223 static void print_type_hash_statistics (void);
224 static void print_debug_expr_statistics (void);
225 static void print_value_expr_statistics (void);
226 static int type_hash_marked_p (const void *);
227 static unsigned int type_hash_list (const_tree, hashval_t);
228 static unsigned int attribute_hash_list (const_tree, hashval_t);
229 static bool decls_same_for_odr (tree decl1, tree decl2);
230
231 tree global_trees[TI_MAX];
232 tree integer_types[itk_none];
233
234 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
235
236 /* Number of operands for each OpenMP clause. */
237 unsigned const char omp_clause_num_ops[] =
238 {
239 0, /* OMP_CLAUSE_ERROR */
240 1, /* OMP_CLAUSE_PRIVATE */
241 1, /* OMP_CLAUSE_SHARED */
242 1, /* OMP_CLAUSE_FIRSTPRIVATE */
243 2, /* OMP_CLAUSE_LASTPRIVATE */
244 4, /* OMP_CLAUSE_REDUCTION */
245 1, /* OMP_CLAUSE_COPYIN */
246 1, /* OMP_CLAUSE_COPYPRIVATE */
247 2, /* OMP_CLAUSE_LINEAR */
248 2, /* OMP_CLAUSE_ALIGNED */
249 1, /* OMP_CLAUSE_DEPEND */
250 1, /* OMP_CLAUSE_UNIFORM */
251 2, /* OMP_CLAUSE_FROM */
252 2, /* OMP_CLAUSE_TO */
253 2, /* OMP_CLAUSE_MAP */
254 1, /* OMP_CLAUSE__LOOPTEMP_ */
255 1, /* OMP_CLAUSE_IF */
256 1, /* OMP_CLAUSE_NUM_THREADS */
257 1, /* OMP_CLAUSE_SCHEDULE */
258 0, /* OMP_CLAUSE_NOWAIT */
259 0, /* OMP_CLAUSE_ORDERED */
260 0, /* OMP_CLAUSE_DEFAULT */
261 3, /* OMP_CLAUSE_COLLAPSE */
262 0, /* OMP_CLAUSE_UNTIED */
263 1, /* OMP_CLAUSE_FINAL */
264 0, /* OMP_CLAUSE_MERGEABLE */
265 1, /* OMP_CLAUSE_DEVICE */
266 1, /* OMP_CLAUSE_DIST_SCHEDULE */
267 0, /* OMP_CLAUSE_INBRANCH */
268 0, /* OMP_CLAUSE_NOTINBRANCH */
269 1, /* OMP_CLAUSE_NUM_TEAMS */
270 1, /* OMP_CLAUSE_THREAD_LIMIT */
271 0, /* OMP_CLAUSE_PROC_BIND */
272 1, /* OMP_CLAUSE_SAFELEN */
273 1, /* OMP_CLAUSE_SIMDLEN */
274 0, /* OMP_CLAUSE_FOR */
275 0, /* OMP_CLAUSE_PARALLEL */
276 0, /* OMP_CLAUSE_SECTIONS */
277 0, /* OMP_CLAUSE_TASKGROUP */
278 1, /* OMP_CLAUSE__SIMDUID_ */
279 };
280
281 const char * const omp_clause_code_name[] =
282 {
283 "error_clause",
284 "private",
285 "shared",
286 "firstprivate",
287 "lastprivate",
288 "reduction",
289 "copyin",
290 "copyprivate",
291 "linear",
292 "aligned",
293 "depend",
294 "uniform",
295 "from",
296 "to",
297 "map",
298 "_looptemp_",
299 "if",
300 "num_threads",
301 "schedule",
302 "nowait",
303 "ordered",
304 "default",
305 "collapse",
306 "untied",
307 "final",
308 "mergeable",
309 "device",
310 "dist_schedule",
311 "inbranch",
312 "notinbranch",
313 "num_teams",
314 "thread_limit",
315 "proc_bind",
316 "safelen",
317 "simdlen",
318 "for",
319 "parallel",
320 "sections",
321 "taskgroup",
322 "_simduid_"
323 };
324
325
326 /* Return the tree node structure used by tree code CODE. */
327
328 static inline enum tree_node_structure_enum
329 tree_node_structure_for_code (enum tree_code code)
330 {
331 switch (TREE_CODE_CLASS (code))
332 {
333 case tcc_declaration:
334 {
335 switch (code)
336 {
337 case FIELD_DECL:
338 return TS_FIELD_DECL;
339 case PARM_DECL:
340 return TS_PARM_DECL;
341 case VAR_DECL:
342 return TS_VAR_DECL;
343 case LABEL_DECL:
344 return TS_LABEL_DECL;
345 case RESULT_DECL:
346 return TS_RESULT_DECL;
347 case DEBUG_EXPR_DECL:
348 return TS_DECL_WRTL;
349 case CONST_DECL:
350 return TS_CONST_DECL;
351 case TYPE_DECL:
352 return TS_TYPE_DECL;
353 case FUNCTION_DECL:
354 return TS_FUNCTION_DECL;
355 case TRANSLATION_UNIT_DECL:
356 return TS_TRANSLATION_UNIT_DECL;
357 default:
358 return TS_DECL_NON_COMMON;
359 }
360 }
361 case tcc_type:
362 return TS_TYPE_NON_COMMON;
363 case tcc_reference:
364 case tcc_comparison:
365 case tcc_unary:
366 case tcc_binary:
367 case tcc_expression:
368 case tcc_statement:
369 case tcc_vl_exp:
370 return TS_EXP;
371 default: /* tcc_constant and tcc_exceptional */
372 break;
373 }
374 switch (code)
375 {
376 /* tcc_constant cases. */
377 case INTEGER_CST: return TS_INT_CST;
378 case REAL_CST: return TS_REAL_CST;
379 case FIXED_CST: return TS_FIXED_CST;
380 case COMPLEX_CST: return TS_COMPLEX;
381 case VECTOR_CST: return TS_VECTOR;
382 case STRING_CST: return TS_STRING;
383 /* tcc_exceptional cases. */
384 case ERROR_MARK: return TS_COMMON;
385 case IDENTIFIER_NODE: return TS_IDENTIFIER;
386 case TREE_LIST: return TS_LIST;
387 case TREE_VEC: return TS_VEC;
388 case SSA_NAME: return TS_SSA_NAME;
389 case PLACEHOLDER_EXPR: return TS_COMMON;
390 case STATEMENT_LIST: return TS_STATEMENT_LIST;
391 case BLOCK: return TS_BLOCK;
392 case CONSTRUCTOR: return TS_CONSTRUCTOR;
393 case TREE_BINFO: return TS_BINFO;
394 case OMP_CLAUSE: return TS_OMP_CLAUSE;
395 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
396 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
397
398 default:
399 gcc_unreachable ();
400 }
401 }
402
403
404 /* Initialize tree_contains_struct to describe the hierarchy of tree
405 nodes. */
406
407 static void
408 initialize_tree_contains_struct (void)
409 {
410 unsigned i;
411
412 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
413 {
414 enum tree_code code;
415 enum tree_node_structure_enum ts_code;
416
417 code = (enum tree_code) i;
418 ts_code = tree_node_structure_for_code (code);
419
420 /* Mark the TS structure itself. */
421 tree_contains_struct[code][ts_code] = 1;
422
423 /* Mark all the structures that TS is derived from. */
424 switch (ts_code)
425 {
426 case TS_TYPED:
427 case TS_BLOCK:
428 MARK_TS_BASE (code);
429 break;
430
431 case TS_COMMON:
432 case TS_INT_CST:
433 case TS_REAL_CST:
434 case TS_FIXED_CST:
435 case TS_VECTOR:
436 case TS_STRING:
437 case TS_COMPLEX:
438 case TS_SSA_NAME:
439 case TS_CONSTRUCTOR:
440 case TS_EXP:
441 case TS_STATEMENT_LIST:
442 MARK_TS_TYPED (code);
443 break;
444
445 case TS_IDENTIFIER:
446 case TS_DECL_MINIMAL:
447 case TS_TYPE_COMMON:
448 case TS_LIST:
449 case TS_VEC:
450 case TS_BINFO:
451 case TS_OMP_CLAUSE:
452 case TS_OPTIMIZATION:
453 case TS_TARGET_OPTION:
454 MARK_TS_COMMON (code);
455 break;
456
457 case TS_TYPE_WITH_LANG_SPECIFIC:
458 MARK_TS_TYPE_COMMON (code);
459 break;
460
461 case TS_TYPE_NON_COMMON:
462 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
463 break;
464
465 case TS_DECL_COMMON:
466 MARK_TS_DECL_MINIMAL (code);
467 break;
468
469 case TS_DECL_WRTL:
470 case TS_CONST_DECL:
471 MARK_TS_DECL_COMMON (code);
472 break;
473
474 case TS_DECL_NON_COMMON:
475 MARK_TS_DECL_WITH_VIS (code);
476 break;
477
478 case TS_DECL_WITH_VIS:
479 case TS_PARM_DECL:
480 case TS_LABEL_DECL:
481 case TS_RESULT_DECL:
482 MARK_TS_DECL_WRTL (code);
483 break;
484
485 case TS_FIELD_DECL:
486 MARK_TS_DECL_COMMON (code);
487 break;
488
489 case TS_VAR_DECL:
490 MARK_TS_DECL_WITH_VIS (code);
491 break;
492
493 case TS_TYPE_DECL:
494 case TS_FUNCTION_DECL:
495 MARK_TS_DECL_NON_COMMON (code);
496 break;
497
498 case TS_TRANSLATION_UNIT_DECL:
499 MARK_TS_DECL_COMMON (code);
500 break;
501
502 default:
503 gcc_unreachable ();
504 }
505 }
506
507 /* Basic consistency checks for attributes used in fold. */
508 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
509 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
510 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
511 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
512 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
513 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
514 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
515 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
516 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
520 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
521 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
522 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
523 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
524 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
525 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
526 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
527 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
528 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
529 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
530 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
534 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
536 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
537 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
538 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
540 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
541 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
542 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
543 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
544 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
545 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
546 }
547
548
549 /* Init tree.c. */
550
551 void
552 init_ttree (void)
553 {
554 /* Initialize the hash table of types. */
555 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
556 type_hash_eq, 0);
557
558 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
559 tree_decl_map_eq, 0);
560
561 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
562 tree_decl_map_eq, 0);
563 init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
564 tree_priority_map_eq, 0);
565
566 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
567 int_cst_hash_eq, NULL);
568
569 int_cst_node = make_int_cst (1, 1);
570
571 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
572 cl_option_hash_eq, NULL);
573
574 cl_optimization_node = make_node (OPTIMIZATION_NODE);
575 cl_target_option_node = make_node (TARGET_OPTION_NODE);
576
577 /* Initialize the tree_contains_struct array. */
578 initialize_tree_contains_struct ();
579 lang_hooks.init_ts ();
580 }
581
582 \f
583 /* The name of the object as the assembler will see it (but before any
584 translations made by ASM_OUTPUT_LABELREF). Often this is the same
585 as DECL_NAME. It is an IDENTIFIER_NODE. */
586 tree
587 decl_assembler_name (tree decl)
588 {
589 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
590 lang_hooks.set_decl_assembler_name (decl);
591 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
592 }
593
594 /* Compute the number of bytes occupied by a tree with code CODE.
595 This function cannot be used for nodes that have variable sizes,
596 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
597 size_t
598 tree_code_size (enum tree_code code)
599 {
600 switch (TREE_CODE_CLASS (code))
601 {
602 case tcc_declaration: /* A decl node */
603 {
604 switch (code)
605 {
606 case FIELD_DECL:
607 return sizeof (struct tree_field_decl);
608 case PARM_DECL:
609 return sizeof (struct tree_parm_decl);
610 case VAR_DECL:
611 return sizeof (struct tree_var_decl);
612 case LABEL_DECL:
613 return sizeof (struct tree_label_decl);
614 case RESULT_DECL:
615 return sizeof (struct tree_result_decl);
616 case CONST_DECL:
617 return sizeof (struct tree_const_decl);
618 case TYPE_DECL:
619 return sizeof (struct tree_type_decl);
620 case FUNCTION_DECL:
621 return sizeof (struct tree_function_decl);
622 case DEBUG_EXPR_DECL:
623 return sizeof (struct tree_decl_with_rtl);
624 default:
625 return sizeof (struct tree_decl_non_common);
626 }
627 }
628
629 case tcc_type: /* a type node */
630 return sizeof (struct tree_type_non_common);
631
632 case tcc_reference: /* a reference */
633 case tcc_expression: /* an expression */
634 case tcc_statement: /* an expression with side effects */
635 case tcc_comparison: /* a comparison expression */
636 case tcc_unary: /* a unary arithmetic expression */
637 case tcc_binary: /* a binary arithmetic expression */
638 return (sizeof (struct tree_exp)
639 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
640
641 case tcc_constant: /* a constant */
642 switch (code)
643 {
644 case INTEGER_CST: gcc_unreachable ();
645 case REAL_CST: return sizeof (struct tree_real_cst);
646 case FIXED_CST: return sizeof (struct tree_fixed_cst);
647 case COMPLEX_CST: return sizeof (struct tree_complex);
648 case VECTOR_CST: return sizeof (struct tree_vector);
649 case STRING_CST: gcc_unreachable ();
650 default:
651 return lang_hooks.tree_size (code);
652 }
653
654 case tcc_exceptional: /* something random, like an identifier. */
655 switch (code)
656 {
657 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
658 case TREE_LIST: return sizeof (struct tree_list);
659
660 case ERROR_MARK:
661 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
662
663 case TREE_VEC:
664 case OMP_CLAUSE: gcc_unreachable ();
665
666 case SSA_NAME: return sizeof (struct tree_ssa_name);
667
668 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
669 case BLOCK: return sizeof (struct tree_block);
670 case CONSTRUCTOR: return sizeof (struct tree_constructor);
671 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
672 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
673
674 default:
675 return lang_hooks.tree_size (code);
676 }
677
678 default:
679 gcc_unreachable ();
680 }
681 }
682
683 /* Compute the number of bytes occupied by NODE. This routine only
684 looks at TREE_CODE, except for those nodes that have variable sizes. */
685 size_t
686 tree_size (const_tree node)
687 {
688 const enum tree_code code = TREE_CODE (node);
689 switch (code)
690 {
691 case INTEGER_CST:
692 return (sizeof (struct tree_int_cst)
693 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
694
695 case TREE_BINFO:
696 return (offsetof (struct tree_binfo, base_binfos)
697 + vec<tree, va_gc>
698 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
699
700 case TREE_VEC:
701 return (sizeof (struct tree_vec)
702 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
703
704 case VECTOR_CST:
705 return (sizeof (struct tree_vector)
706 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
707
708 case STRING_CST:
709 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
710
711 case OMP_CLAUSE:
712 return (sizeof (struct tree_omp_clause)
713 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
714 * sizeof (tree));
715
716 default:
717 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
718 return (sizeof (struct tree_exp)
719 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
720 else
721 return tree_code_size (code);
722 }
723 }
724
725 /* Record interesting allocation statistics for a tree node with CODE
726 and LENGTH. */
727
728 static void
729 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
730 size_t length ATTRIBUTE_UNUSED)
731 {
732 enum tree_code_class type = TREE_CODE_CLASS (code);
733 tree_node_kind kind;
734
735 if (!GATHER_STATISTICS)
736 return;
737
738 switch (type)
739 {
740 case tcc_declaration: /* A decl node */
741 kind = d_kind;
742 break;
743
744 case tcc_type: /* a type node */
745 kind = t_kind;
746 break;
747
748 case tcc_statement: /* an expression with side effects */
749 kind = s_kind;
750 break;
751
752 case tcc_reference: /* a reference */
753 kind = r_kind;
754 break;
755
756 case tcc_expression: /* an expression */
757 case tcc_comparison: /* a comparison expression */
758 case tcc_unary: /* a unary arithmetic expression */
759 case tcc_binary: /* a binary arithmetic expression */
760 kind = e_kind;
761 break;
762
763 case tcc_constant: /* a constant */
764 kind = c_kind;
765 break;
766
767 case tcc_exceptional: /* something random, like an identifier. */
768 switch (code)
769 {
770 case IDENTIFIER_NODE:
771 kind = id_kind;
772 break;
773
774 case TREE_VEC:
775 kind = vec_kind;
776 break;
777
778 case TREE_BINFO:
779 kind = binfo_kind;
780 break;
781
782 case SSA_NAME:
783 kind = ssa_name_kind;
784 break;
785
786 case BLOCK:
787 kind = b_kind;
788 break;
789
790 case CONSTRUCTOR:
791 kind = constr_kind;
792 break;
793
794 case OMP_CLAUSE:
795 kind = omp_clause_kind;
796 break;
797
798 default:
799 kind = x_kind;
800 break;
801 }
802 break;
803
804 case tcc_vl_exp:
805 kind = e_kind;
806 break;
807
808 default:
809 gcc_unreachable ();
810 }
811
812 tree_code_counts[(int) code]++;
813 tree_node_counts[(int) kind]++;
814 tree_node_sizes[(int) kind] += length;
815 }
816
817 /* Allocate and return a new UID from the DECL_UID namespace. */
818
819 int
820 allocate_decl_uid (void)
821 {
822 return next_decl_uid++;
823 }
824
825 /* Return a newly allocated node of code CODE. For decl and type
826 nodes, some other fields are initialized. The rest of the node is
827 initialized to zero. This function cannot be used for TREE_VEC,
828 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
829 tree_code_size.
830
831 Achoo! I got a code in the node. */
832
833 tree
834 make_node_stat (enum tree_code code MEM_STAT_DECL)
835 {
836 tree t;
837 enum tree_code_class type = TREE_CODE_CLASS (code);
838 size_t length = tree_code_size (code);
839
840 record_node_allocation_statistics (code, length);
841
842 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
843 TREE_SET_CODE (t, code);
844
845 switch (type)
846 {
847 case tcc_statement:
848 TREE_SIDE_EFFECTS (t) = 1;
849 break;
850
851 case tcc_declaration:
852 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
853 {
854 if (code == FUNCTION_DECL)
855 {
856 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
857 DECL_MODE (t) = FUNCTION_MODE;
858 }
859 else
860 DECL_ALIGN (t) = 1;
861 }
862 DECL_SOURCE_LOCATION (t) = input_location;
863 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
864 DECL_UID (t) = --next_debug_decl_uid;
865 else
866 {
867 DECL_UID (t) = allocate_decl_uid ();
868 SET_DECL_PT_UID (t, -1);
869 }
870 if (TREE_CODE (t) == LABEL_DECL)
871 LABEL_DECL_UID (t) = -1;
872
873 break;
874
875 case tcc_type:
876 TYPE_UID (t) = next_type_uid++;
877 TYPE_ALIGN (t) = BITS_PER_UNIT;
878 TYPE_USER_ALIGN (t) = 0;
879 TYPE_MAIN_VARIANT (t) = t;
880 TYPE_CANONICAL (t) = t;
881
882 /* Default to no attributes for type, but let target change that. */
883 TYPE_ATTRIBUTES (t) = NULL_TREE;
884 targetm.set_default_type_attributes (t);
885
886 /* We have not yet computed the alias set for this type. */
887 TYPE_ALIAS_SET (t) = -1;
888 break;
889
890 case tcc_constant:
891 TREE_CONSTANT (t) = 1;
892 break;
893
894 case tcc_expression:
895 switch (code)
896 {
897 case INIT_EXPR:
898 case MODIFY_EXPR:
899 case VA_ARG_EXPR:
900 case PREDECREMENT_EXPR:
901 case PREINCREMENT_EXPR:
902 case POSTDECREMENT_EXPR:
903 case POSTINCREMENT_EXPR:
904 /* All of these have side-effects, no matter what their
905 operands are. */
906 TREE_SIDE_EFFECTS (t) = 1;
907 break;
908
909 default:
910 break;
911 }
912 break;
913
914 default:
915 /* Other classes need no special treatment. */
916 break;
917 }
918
919 return t;
920 }
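/* Illustrative sketch, not part of the original source: building a fresh
   type node through the make_node wrapper.  As the tcc_type arm above
   shows, the node gets a new TYPE_UID, BITS_PER_UNIT alignment, and is
   its own main variant and canonical type.

     tree t = make_node (INTEGER_TYPE);
     gcc_assert (TYPE_MAIN_VARIANT (t) == t && TYPE_CANONICAL (t) == t);  */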
921 \f
922 /* Return a new node with the same contents as NODE except that its
923 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
924
925 tree
926 copy_node_stat (tree node MEM_STAT_DECL)
927 {
928 tree t;
929 enum tree_code code = TREE_CODE (node);
930 size_t length;
931
932 gcc_assert (code != STATEMENT_LIST);
933
934 length = tree_size (node);
935 record_node_allocation_statistics (code, length);
936 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
937 memcpy (t, node, length);
938
939 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
940 TREE_CHAIN (t) = 0;
941 TREE_ASM_WRITTEN (t) = 0;
942 TREE_VISITED (t) = 0;
943
944 if (TREE_CODE_CLASS (code) == tcc_declaration)
945 {
946 if (code == DEBUG_EXPR_DECL)
947 DECL_UID (t) = --next_debug_decl_uid;
948 else
949 {
950 DECL_UID (t) = allocate_decl_uid ();
951 if (DECL_PT_UID_SET_P (node))
952 SET_DECL_PT_UID (t, DECL_PT_UID (node));
953 }
954 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
955 && DECL_HAS_VALUE_EXPR_P (node))
956 {
957 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
958 DECL_HAS_VALUE_EXPR_P (t) = 1;
959 }
960 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
961 if (TREE_CODE (node) == VAR_DECL)
962 DECL_HAS_DEBUG_EXPR_P (t) = 0;
963 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
964 {
965 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
966 DECL_HAS_INIT_PRIORITY_P (t) = 1;
967 }
968 if (TREE_CODE (node) == FUNCTION_DECL)
969 DECL_STRUCT_FUNCTION (t) = NULL;
970 }
971 else if (TREE_CODE_CLASS (code) == tcc_type)
972 {
973 TYPE_UID (t) = next_type_uid++;
974 /* The following is so that the debug code for
975 the copy is different from the original type.
976 The two statements usually duplicate each other
977 (because they clear fields of the same union),
978 but the optimizer should catch that. */
979 TYPE_SYMTAB_POINTER (t) = 0;
980 TYPE_SYMTAB_ADDRESS (t) = 0;
981
982 /* Do not copy the values cache. */
983 if (TYPE_CACHED_VALUES_P (t))
984 {
985 TYPE_CACHED_VALUES_P (t) = 0;
986 TYPE_CACHED_VALUES (t) = NULL_TREE;
987 }
988 }
989
990 return t;
991 }
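/* Illustrative sketch, not part of the original source: copying a decl with
   the copy_node wrapper.  The copy gets a fresh DECL_UID and a cleared
   TREE_CHAIN, while its other fields still point at the same operands as
   the original (V1 here is assumed to be a VAR_DECL built elsewhere).

     tree v2 = copy_node (v1);
     gcc_assert (DECL_UID (v2) != DECL_UID (v1));  */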
992
993 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
994 For example, this can copy a list made of TREE_LIST nodes. */
995
996 tree
997 copy_list (tree list)
998 {
999 tree head;
1000 tree prev, next;
1001
1002 if (list == 0)
1003 return 0;
1004
1005 head = prev = copy_node (list);
1006 next = TREE_CHAIN (list);
1007 while (next)
1008 {
1009 TREE_CHAIN (prev) = copy_node (next);
1010 prev = TREE_CHAIN (prev);
1011 next = TREE_CHAIN (next);
1012 }
1013 return head;
1014 }
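/* Illustrative sketch, not part of the original source: copy_list duplicates
   only the TREE_LIST cells; the TREE_PURPOSE and TREE_VALUE operands are
   shared with the original chain.

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
			    tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     tree dup = copy_list (orig);
     gcc_assert (dup != orig && TREE_VALUE (dup) == TREE_VALUE (orig));  */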
1015
1016 \f
1017 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1018 INTEGER_CST with value CST and type TYPE. */
1019
1020 static unsigned int
1021 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1022 {
1023 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1024 /* We need an extra zero HWI if CST is an unsigned integer with its
1025 upper bit set, and if CST occupies a whole number of HWIs. */
1026 if (TYPE_UNSIGNED (type)
1027 && wi::neg_p (cst)
1028 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1029 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1030 return cst.get_len ();
1031 }
1032
1033 /* Return a new INTEGER_CST with value CST and type TYPE. */
1034
1035 static tree
1036 build_new_int_cst (tree type, const wide_int &cst)
1037 {
1038 unsigned int len = cst.get_len ();
1039 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1040 tree nt = make_int_cst (len, ext_len);
1041
1042 if (len < ext_len)
1043 {
1044 --ext_len;
1045 TREE_INT_CST_ELT (nt, ext_len) = 0;
1046 for (unsigned int i = len; i < ext_len; ++i)
1047 TREE_INT_CST_ELT (nt, i) = -1;
1048 }
1049 else if (TYPE_UNSIGNED (type)
1050 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1051 {
1052 len--;
1053 TREE_INT_CST_ELT (nt, len)
1054 = zext_hwi (cst.elt (len),
1055 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1056 }
1057
1058 for (unsigned int i = 0; i < len; i++)
1059 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1060 TREE_TYPE (nt) = type;
1061 return nt;
1062 }
1063
1064 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1065
1066 tree
1067 build_int_cst (tree type, HOST_WIDE_INT low)
1068 {
1069 /* Support legacy code. */
1070 if (!type)
1071 type = integer_type_node;
1072
1073 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1074 }
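/* Illustrative sketch, not part of the original source: LOW is sign extended
   to the precision of TYPE, so -1 works for any integer type, and because
   wide_int_to_tree always hands back shared nodes, building the same small
   constant twice yields pointer-identical trees.

     tree m1 = build_int_cst (integer_type_node, -1);
     tree ten = build_int_cst (integer_type_node, 10);
     gcc_assert (ten == build_int_cst (integer_type_node, 10));  */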
1075
1076 tree
1077 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1078 {
1079 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1080 }
1081
1082 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1083
1084 tree
1085 build_int_cst_type (tree type, HOST_WIDE_INT low)
1086 {
1087 gcc_assert (type);
1088 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1089 }
1090
1091 /* Construct a tree of type TYPE with the value given by CST. The signedness
1092 of CST is assumed to be the same as the signedness of TYPE. */
1093
1094 tree
1095 double_int_to_tree (tree type, double_int cst)
1096 {
1097 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1098 }
1099
1100 /* We force the wide_int CST to the range of the type TYPE by sign or
1101 zero extending it. OVERFLOWABLE indicates if we are interested in
1102 overflow of the value, when >0 we are only interested in signed
1103 overflow, for <0 we are interested in any overflow. OVERFLOWED
1104 indicates whether overflow has already occurred.
1105 We force the result's value to be within range of TYPE (by setting
1106 to 0 or 1 all the bits outside the type's range). We set
1107 TREE_OVERFLOW if
1108 OVERFLOWED is nonzero,
1109 or OVERFLOWABLE is >0 and signed overflow occurs,
1110 or OVERFLOWABLE is <0 and any overflow occurs.
1111 We return a new tree node for the extended wide_int. The node
1112 is shared if no overflow flags are set. */
1113
1114
1115 tree
1116 force_fit_type (tree type, const wide_int_ref &cst,
1117 int overflowable, bool overflowed)
1118 {
1119 signop sign = TYPE_SIGN (type);
1120
1121 /* If we need to set overflow flags, return a new unshared node. */
1122 if (overflowed || !wi::fits_to_tree_p (cst, type))
1123 {
1124 if (overflowed
1125 || overflowable < 0
1126 || (overflowable > 0 && sign == SIGNED))
1127 {
1128 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1129 tree t = build_new_int_cst (type, tmp);
1130 TREE_OVERFLOW (t) = 1;
1131 return t;
1132 }
1133 }
1134
1135 /* Else build a shared node. */
1136 return wide_int_to_tree (type, cst);
1137 }
1138
1139 /* These are the hash table functions for the hash table of shared
1140 INTEGER_CST nodes. */
1141
1142 /* Return the hash code of X, an INTEGER_CST. */
1143
1144 static hashval_t
1145 int_cst_hash_hash (const void *x)
1146 {
1147 const_tree const t = (const_tree) x;
1148 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1149 int i;
1150
1151 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1152 code ^= TREE_INT_CST_ELT (t, i);
1153
1154 return code;
1155 }
1156
1157 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1158 is the same as that given by *Y, also an INTEGER_CST tree node. */
1159
1160 static int
1161 int_cst_hash_eq (const void *x, const void *y)
1162 {
1163 const_tree const xt = (const_tree) x;
1164 const_tree const yt = (const_tree) y;
1165
1166 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1167 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1168 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1169 return false;
1170
1171 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1172 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1173 return false;
1174
1175 return true;
1176 }
1177
1178 /* Create an INT_CST node of TYPE and value CST.
1179 The returned node is always shared. For small integers we use a
1180 per-type vector cache, for larger ones we use a single hash table.
1181 The value is extended from its precision according to the sign of
1182 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1183 the upper bits and ensures that hashing and value equality based
1184 upon the underlying HOST_WIDE_INTs works without masking. */
1185
1186 tree
1187 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1188 {
1189 tree t;
1190 int ix = -1;
1191 int limit = 0;
1192
1193 gcc_assert (type);
1194 unsigned int prec = TYPE_PRECISION (type);
1195 signop sgn = TYPE_SIGN (type);
1196
1197 /* Verify that everything is canonical. */
1198 int l = pcst.get_len ();
1199 if (l > 1)
1200 {
1201 if (pcst.elt (l - 1) == 0)
1202 gcc_assert (pcst.elt (l - 2) < 0);
1203 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1204 gcc_assert (pcst.elt (l - 2) >= 0);
1205 }
1206
1207 wide_int cst = wide_int::from (pcst, prec, sgn);
1208 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1209
1210 switch (TREE_CODE (type))
1211 {
1212 case NULLPTR_TYPE:
1213 gcc_assert (cst == 0);
1214 /* Fallthru. */
1215
1216 case POINTER_TYPE:
1217 case REFERENCE_TYPE:
1218 case POINTER_BOUNDS_TYPE:
1219 /* Cache NULL pointer and zero bounds. */
1220 if (cst == 0)
1221 {
1222 limit = 1;
1223 ix = 0;
1224 }
1225 break;
1226
1227 case BOOLEAN_TYPE:
1228 /* Cache false or true. */
1229 limit = 2;
1230 if (wi::leu_p (cst, 1))
1231 ix = cst.to_uhwi ();
1232 break;
1233
1234 case INTEGER_TYPE:
1235 case OFFSET_TYPE:
1236 if (TYPE_SIGN (type) == UNSIGNED)
1237 {
1238 /* Cache 0..N */
1239 limit = INTEGER_SHARE_LIMIT;
1240
1241 /* This is a little hokey, but if the prec is smaller than
1242 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1243 obvious test will not get the correct answer. */
1244 if (prec < HOST_BITS_PER_WIDE_INT)
1245 {
1246 if (cst.to_uhwi () < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1247 ix = cst.to_uhwi ();
1248 }
1249 else if (wi::ltu_p (cst, INTEGER_SHARE_LIMIT))
1250 ix = cst.to_uhwi ();
1251 }
1252 else
1253 {
1254 /* Cache -1..N */
1255 limit = INTEGER_SHARE_LIMIT + 1;
1256
1257 if (cst == -1)
1258 ix = 0;
1259 else if (!wi::neg_p (cst))
1260 {
1261 if (prec < HOST_BITS_PER_WIDE_INT)
1262 {
1263 if (cst.to_shwi () < INTEGER_SHARE_LIMIT)
1264 ix = cst.to_shwi () + 1;
1265 }
1266 else if (wi::lts_p (cst, INTEGER_SHARE_LIMIT))
1267 ix = cst.to_shwi () + 1;
1268 }
1269 }
1270 break;
1271
1272 case ENUMERAL_TYPE:
1273 break;
1274
1275 default:
1276 gcc_unreachable ();
1277 }
1278
1279 if (ext_len == 1)
1280 {
1281 /* We just need to store a single HOST_WIDE_INT. */
1282 HOST_WIDE_INT hwi;
1283 if (TYPE_UNSIGNED (type))
1284 hwi = cst.to_uhwi ();
1285 else
1286 hwi = cst.to_shwi ();
1287 if (ix >= 0)
1288 {
1289 /* Look for it in the type's vector of small shared ints. */
1290 if (!TYPE_CACHED_VALUES_P (type))
1291 {
1292 TYPE_CACHED_VALUES_P (type) = 1;
1293 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1294 }
1295
1296 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1297 if (t)
1298 /* Make sure no one is clobbering the shared constant. */
1299 gcc_assert (TREE_TYPE (t) == type
1300 && TREE_INT_CST_NUNITS (t) == 1
1301 && TREE_INT_CST_EXT_NUNITS (t) == 1
1302 && TREE_INT_CST_ELT (t, 0) == hwi);
1303 else
1304 {
1305 /* Create a new shared int. */
1306 t = build_new_int_cst (type, cst);
1307 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1308 }
1309 }
1310 else
1311 {
1312 /* Use the cache of larger shared ints, using int_cst_node as
1313 a temporary. */
1314 void **slot;
1315
1316 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1317 TREE_TYPE (int_cst_node) = type;
1318
1319 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1320 t = (tree) *slot;
1321 if (!t)
1322 {
1323 /* Insert this one into the hash table. */
1324 t = int_cst_node;
1325 *slot = t;
1326 /* Make a new node for next time round. */
1327 int_cst_node = make_int_cst (1, 1);
1328 }
1329 }
1330 }
1331 else
1332 {
1333 /* The value either hashes properly or we drop it on the floor
1334 for the gc to take care of. There will not be enough of them
1335 to worry about. */
1336 void **slot;
1337
1338 tree nt = build_new_int_cst (type, cst);
1339 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1340 t = (tree) *slot;
1341 if (!t)
1342 {
1343 /* Insert this one into the hash table. */
1344 t = nt;
1345 *slot = t;
1346 }
1347 }
1348
1349 return t;
1350 }
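/* Illustrative sketch, not part of the original source: both cache paths
   above return shared nodes, so going through build_int_cst or calling
   wide_int_to_tree directly with an equivalent wide_int gives the same tree.

     tree a = build_int_cst (integer_type_node, 7);
     tree b = wide_int_to_tree (integer_type_node,
				wi::shwi (7, TYPE_PRECISION (integer_type_node)));
     gcc_assert (a == b);  */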
1351
1352 void
1353 cache_integer_cst (tree t)
1354 {
1355 tree type = TREE_TYPE (t);
1356 int ix = -1;
1357 int limit = 0;
1358 int prec = TYPE_PRECISION (type);
1359
1360 gcc_assert (!TREE_OVERFLOW (t));
1361
1362 switch (TREE_CODE (type))
1363 {
1364 case NULLPTR_TYPE:
1365 gcc_assert (integer_zerop (t));
1366 /* Fallthru. */
1367
1368 case POINTER_TYPE:
1369 case REFERENCE_TYPE:
1370 /* Cache NULL pointer. */
1371 if (integer_zerop (t))
1372 {
1373 limit = 1;
1374 ix = 0;
1375 }
1376 break;
1377
1378 case BOOLEAN_TYPE:
1379 /* Cache false or true. */
1380 limit = 2;
1381 if (wi::ltu_p (t, 2))
1382 ix = TREE_INT_CST_ELT (t, 0);
1383 break;
1384
1385 case INTEGER_TYPE:
1386 case OFFSET_TYPE:
1387 if (TYPE_UNSIGNED (type))
1388 {
1389 /* Cache 0..N */
1390 limit = INTEGER_SHARE_LIMIT;
1391
1392 /* This is a little hokey, but if the prec is smaller than
1393 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1394 obvious test will not get the correct answer. */
1395 if (prec < HOST_BITS_PER_WIDE_INT)
1396 {
1397 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1398 ix = tree_to_uhwi (t);
1399 }
1400 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1401 ix = tree_to_uhwi (t);
1402 }
1403 else
1404 {
1405 /* Cache -1..N */
1406 limit = INTEGER_SHARE_LIMIT + 1;
1407
1408 if (integer_minus_onep (t))
1409 ix = 0;
1410 else if (!wi::neg_p (t))
1411 {
1412 if (prec < HOST_BITS_PER_WIDE_INT)
1413 {
1414 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1415 ix = tree_to_shwi (t) + 1;
1416 }
1417 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1418 ix = tree_to_shwi (t) + 1;
1419 }
1420 }
1421 break;
1422
1423 case ENUMERAL_TYPE:
1424 break;
1425
1426 default:
1427 gcc_unreachable ();
1428 }
1429
1430 if (ix >= 0)
1431 {
1432 /* Look for it in the type's vector of small shared ints. */
1433 if (!TYPE_CACHED_VALUES_P (type))
1434 {
1435 TYPE_CACHED_VALUES_P (type) = 1;
1436 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1437 }
1438
1439 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1440 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1441 }
1442 else
1443 {
1444 /* Use the cache of larger shared ints. */
1445 void **slot;
1446
1447 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1448 /* If there is already an entry for the number verify it's the
1449 same. */
1450 if (*slot)
1451 gcc_assert (wi::eq_p (tree (*slot), t));
1452 else
1453 /* Otherwise insert this one into the hash table. */
1454 *slot = t;
1455 }
1456 }
1457
1458
1459 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1460 and the rest are zeros. */
1461
1462 tree
1463 build_low_bits_mask (tree type, unsigned bits)
1464 {
1465 gcc_assert (bits <= TYPE_PRECISION (type));
1466
1467 return wide_int_to_tree (type, wi::mask (bits, false,
1468 TYPE_PRECISION (type)));
1469 }
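/* Illustrative sketch, not part of the original source: a mask with the low
   three bits set in unsigned int.

     tree mask = build_low_bits_mask (unsigned_type_node, 3);
     gcc_assert (tree_to_uhwi (mask) == 7);  */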
1470
1471 /* Build a newly constructed VECTOR_CST node with LEN elements. */
1472
1473 tree
1474 make_vector_stat (unsigned len MEM_STAT_DECL)
1475 {
1476 tree t;
1477 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1478
1479 record_node_allocation_statistics (VECTOR_CST, length);
1480
1481 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1482
1483 TREE_SET_CODE (t, VECTOR_CST);
1484 TREE_CONSTANT (t) = 1;
1485
1486 return t;
1487 }
1488
1489 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1490 are in a list pointed to by VALS. */
1491
1492 tree
1493 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1494 {
1495 int over = 0;
1496 unsigned cnt = 0;
1497 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1498 TREE_TYPE (v) = type;
1499
1500 /* Iterate through elements and check for overflow. */
1501 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1502 {
1503 tree value = vals[cnt];
1504
1505 VECTOR_CST_ELT (v, cnt) = value;
1506
1507 /* Don't crash if we get an address constant. */
1508 if (!CONSTANT_CLASS_P (value))
1509 continue;
1510
1511 over |= TREE_OVERFLOW (value);
1512 }
1513
1514 TREE_OVERFLOW (v) = over;
1515 return v;
1516 }
1517
1518 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1519 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1520
1521 tree
1522 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1523 {
1524 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1525 unsigned HOST_WIDE_INT idx;
1526 tree value;
1527
1528 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1529 vec[idx] = value;
1530 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1531 vec[idx] = build_zero_cst (TREE_TYPE (type));
1532
1533 return build_vector (type, vec);
1534 }
1535
1536 /* Build a vector of type VECTYPE where all the elements are SCs. */
1537 tree
1538 build_vector_from_val (tree vectype, tree sc)
1539 {
1540 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1541
1542 if (sc == error_mark_node)
1543 return sc;
1544
1545 /* Verify that the vector type is suitable for SC. Note that there
1546 is some inconsistency in the type-system with respect to restrict
1547 qualifications of pointers. Vector types always have a main-variant
1548 element type and the qualification is applied to the vector-type.
1549 So TREE_TYPE (vector-type) does not return a properly qualified
1550 vector element-type. */
1551 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1552 TREE_TYPE (vectype)));
1553
1554 if (CONSTANT_CLASS_P (sc))
1555 {
1556 tree *v = XALLOCAVEC (tree, nunits);
1557 for (i = 0; i < nunits; ++i)
1558 v[i] = sc;
1559 return build_vector (vectype, v);
1560 }
1561 else
1562 {
1563 vec<constructor_elt, va_gc> *v;
1564 vec_alloc (v, nunits);
1565 for (i = 0; i < nunits; ++i)
1566 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1567 return build_constructor (vectype, v);
1568 }
1569 }
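/* Illustrative sketch, not part of the original source: splatting zero across
   a vector type (VECTYPE here is assumed to be a VECTOR_TYPE built
   elsewhere).  A constant SC yields a VECTOR_CST; a non-constant SC falls
   back to a CONSTRUCTOR, as the branches above show.

     tree elt = build_zero_cst (TREE_TYPE (vectype));
     tree vec = build_vector_from_val (vectype, elt);  */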
1570
1571 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1572 are in the vec pointed to by VALS. */
1573 tree
1574 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1575 {
1576 tree c = make_node (CONSTRUCTOR);
1577 unsigned int i;
1578 constructor_elt *elt;
1579 bool constant_p = true;
1580 bool side_effects_p = false;
1581
1582 TREE_TYPE (c) = type;
1583 CONSTRUCTOR_ELTS (c) = vals;
1584
1585 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1586 {
1587 /* Mostly ctors will have elts that don't have side-effects, so
1588 the usual case is to scan all the elements. Hence a single
1589 loop for both const and side effects, rather than one loop
1590 each (with early outs). */
1591 if (!TREE_CONSTANT (elt->value))
1592 constant_p = false;
1593 if (TREE_SIDE_EFFECTS (elt->value))
1594 side_effects_p = true;
1595 }
1596
1597 TREE_SIDE_EFFECTS (c) = side_effects_p;
1598 TREE_CONSTANT (c) = constant_p;
1599
1600 return c;
1601 }
1602
1603 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1604 INDEX and VALUE. */
1605 tree
1606 build_constructor_single (tree type, tree index, tree value)
1607 {
1608 vec<constructor_elt, va_gc> *v;
1609 constructor_elt elt = {index, value};
1610
1611 vec_alloc (v, 1);
1612 v->quick_push (elt);
1613
1614 return build_constructor (type, v);
1615 }
1616
1617
1618 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1619 are in a list pointed to by VALS. */
1620 tree
1621 build_constructor_from_list (tree type, tree vals)
1622 {
1623 tree t;
1624 vec<constructor_elt, va_gc> *v = NULL;
1625
1626 if (vals)
1627 {
1628 vec_alloc (v, list_length (vals));
1629 for (t = vals; t; t = TREE_CHAIN (t))
1630 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1631 }
1632
1633 return build_constructor (type, v);
1634 }
1635
1636 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1637 of elements, provided as index/value pairs. */
1638
1639 tree
1640 build_constructor_va (tree type, int nelts, ...)
1641 {
1642 vec<constructor_elt, va_gc> *v = NULL;
1643 va_list p;
1644
1645 va_start (p, nelts);
1646 vec_alloc (v, nelts);
1647 while (nelts--)
1648 {
1649 tree index = va_arg (p, tree);
1650 tree value = va_arg (p, tree);
1651 CONSTRUCTOR_APPEND_ELT (v, index, value);
1652 }
1653 va_end (p);
1654 return build_constructor (type, v);
1655 }
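/* Illustrative sketch, not part of the original source: building the
   initializer { [0] = 1, [1] = 2 } for ARRAY_TYPE, assumed to be an array
   type built elsewhere.  Indices and values alternate in the argument list.

     tree ctor = build_constructor_va (array_type, 2,
				       size_int (0), integer_one_node,
				       size_int (1),
				       build_int_cst (integer_type_node, 2));  */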
1656
1657 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1658
1659 tree
1660 build_fixed (tree type, FIXED_VALUE_TYPE f)
1661 {
1662 tree v;
1663 FIXED_VALUE_TYPE *fp;
1664
1665 v = make_node (FIXED_CST);
1666 fp = ggc_alloc_fixed_value ();
1667 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1668
1669 TREE_TYPE (v) = type;
1670 TREE_FIXED_CST_PTR (v) = fp;
1671 return v;
1672 }
1673
1674 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1675
1676 tree
1677 build_real (tree type, REAL_VALUE_TYPE d)
1678 {
1679 tree v;
1680 REAL_VALUE_TYPE *dp;
1681 int overflow = 0;
1682
1683 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1684 Consider doing it via real_convert now. */
1685
1686 v = make_node (REAL_CST);
1687 dp = ggc_alloc_real_value ();
1688 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1689
1690 TREE_TYPE (v) = type;
1691 TREE_REAL_CST_PTR (v) = dp;
1692 TREE_OVERFLOW (v) = overflow;
1693 return v;
1694 }
1695
1696 /* Return a REAL_VALUE_TYPE holding the integer value of the
1697 INTEGER_CST node I, converted for floating-point type TYPE. */
1698
1699 REAL_VALUE_TYPE
1700 real_value_from_int_cst (const_tree type, const_tree i)
1701 {
1702 REAL_VALUE_TYPE d;
1703
1704 /* Clear all bits of the real value type so that we can later do
1705 bitwise comparisons to see if two values are the same. */
1706 memset (&d, 0, sizeof d);
1707
1708 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
1709 wide_int (i), TYPE_SIGN (TREE_TYPE (i)));
1710 return d;
1711 }
1712
1713 /* Given a tree representing an integer constant I, return a tree
1714 representing the same value as a floating-point constant of type TYPE. */
1715
1716 tree
1717 build_real_from_int_cst (tree type, const_tree i)
1718 {
1719 tree v;
1720 int overflow = TREE_OVERFLOW (i);
1721
1722 v = build_real (type, real_value_from_int_cst (type, i));
1723
1724 TREE_OVERFLOW (v) |= overflow;
1725 return v;
1726 }
1727
1728 /* Return a newly constructed STRING_CST node whose value is
1729 the LEN characters at STR.
1730 Note that for a C string literal, LEN should include the trailing NUL.
1731 The TREE_TYPE is not initialized. */
1732
1733 tree
1734 build_string (int len, const char *str)
1735 {
1736 tree s;
1737 size_t length;
1738
1739 /* Do not waste bytes provided by padding of struct tree_string. */
1740 length = len + offsetof (struct tree_string, str) + 1;
1741
1742 record_node_allocation_statistics (STRING_CST, length);
1743
1744 s = ggc_alloc_tree_node (length);
1745
1746 memset (s, 0, sizeof (struct tree_typed));
1747 TREE_SET_CODE (s, STRING_CST);
1748 TREE_CONSTANT (s) = 1;
1749 TREE_STRING_LENGTH (s) = len;
1750 memcpy (s->string.str, str, len);
1751 s->string.str[len] = '\0';
1752
1753 return s;
1754 }
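/* Illustrative sketch, not part of the original source: for a C string
   literal, LEN includes the trailing NUL, so "hi" is passed with length 3;
   the caller must still set TREE_TYPE on the result.

     tree s = build_string (3, "hi");
     gcc_assert (TREE_STRING_LENGTH (s) == 3);  */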
1755
1756 /* Return a newly constructed COMPLEX_CST node whose value is
1757 specified by the real and imaginary parts REAL and IMAG.
1758 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1759 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1760
1761 tree
1762 build_complex (tree type, tree real, tree imag)
1763 {
1764 tree t = make_node (COMPLEX_CST);
1765
1766 TREE_REALPART (t) = real;
1767 TREE_IMAGPART (t) = imag;
1768 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1769 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1770 return t;
1771 }
1772
1773 /* Return a constant of arithmetic type TYPE which is the
1774 multiplicative identity of the set TYPE. */
1775
1776 tree
1777 build_one_cst (tree type)
1778 {
1779 switch (TREE_CODE (type))
1780 {
1781 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1782 case POINTER_TYPE: case REFERENCE_TYPE:
1783 case OFFSET_TYPE:
1784 return build_int_cst (type, 1);
1785
1786 case REAL_TYPE:
1787 return build_real (type, dconst1);
1788
1789 case FIXED_POINT_TYPE:
1790 /* We can only generate 1 for accum types. */
1791 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1792 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1793
1794 case VECTOR_TYPE:
1795 {
1796 tree scalar = build_one_cst (TREE_TYPE (type));
1797
1798 return build_vector_from_val (type, scalar);
1799 }
1800
1801 case COMPLEX_TYPE:
1802 return build_complex (type,
1803 build_one_cst (TREE_TYPE (type)),
1804 build_zero_cst (TREE_TYPE (type)));
1805
1806 default:
1807 gcc_unreachable ();
1808 }
1809 }
1810
1811 /* Return an integer of type TYPE containing all 1's in as much precision as
1812 it contains, or a complex or vector whose subparts are such integers. */
1813
1814 tree
1815 build_all_ones_cst (tree type)
1816 {
1817 if (TREE_CODE (type) == COMPLEX_TYPE)
1818 {
1819 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1820 return build_complex (type, scalar, scalar);
1821 }
1822 else
1823 return build_minus_one_cst (type);
1824 }
1825
1826 /* Return a constant of arithmetic type TYPE which is the
1827 opposite of the multiplicative identity of the set TYPE. */
1828
1829 tree
1830 build_minus_one_cst (tree type)
1831 {
1832 switch (TREE_CODE (type))
1833 {
1834 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1835 case POINTER_TYPE: case REFERENCE_TYPE:
1836 case OFFSET_TYPE:
1837 return build_int_cst (type, -1);
1838
1839 case REAL_TYPE:
1840 return build_real (type, dconstm1);
1841
1842 case FIXED_POINT_TYPE:
1843 /* We can only generate -1 for accum types. */
1844 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1845 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1846 TYPE_MODE (type)));
1847
1848 case VECTOR_TYPE:
1849 {
1850 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1851
1852 return build_vector_from_val (type, scalar);
1853 }
1854
1855 case COMPLEX_TYPE:
1856 return build_complex (type,
1857 build_minus_one_cst (TREE_TYPE (type)),
1858 build_zero_cst (TREE_TYPE (type)));
1859
1860 default:
1861 gcc_unreachable ();
1862 }
1863 }
1864
1865 /* Build 0 constant of type TYPE. This is used by constructor folding
1866 and thus the constant should be represented in memory by
1867 zero(es). */
1868
1869 tree
1870 build_zero_cst (tree type)
1871 {
1872 switch (TREE_CODE (type))
1873 {
1874 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1875 case POINTER_TYPE: case REFERENCE_TYPE:
1876 case OFFSET_TYPE: case NULLPTR_TYPE:
1877 return build_int_cst (type, 0);
1878
1879 case REAL_TYPE:
1880 return build_real (type, dconst0);
1881
1882 case FIXED_POINT_TYPE:
1883 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1884
1885 case VECTOR_TYPE:
1886 {
1887 tree scalar = build_zero_cst (TREE_TYPE (type));
1888
1889 return build_vector_from_val (type, scalar);
1890 }
1891
1892 case COMPLEX_TYPE:
1893 {
1894 tree zero = build_zero_cst (TREE_TYPE (type));
1895
1896 return build_complex (type, zero, zero);
1897 }
1898
1899 default:
1900 if (!AGGREGATE_TYPE_P (type))
1901 return fold_convert (type, integer_zero_node);
1902 return build_constructor (type, NULL);
1903 }
1904 }
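/* Illustrative sketch, not part of the original source: scalar types get the
   matching INTEGER_CST or REAL_CST zero, while aggregates such as a
   RECORD_TYPE fall through to an empty CONSTRUCTOR.

     tree z = build_zero_cst (integer_type_node);
     gcc_assert (integer_zerop (z));  */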
1905
1906
1907 /* Build a BINFO with room for BASE_BINFOS base binfos. */
1908
1909 tree
1910 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
1911 {
1912 tree t;
1913 size_t length = (offsetof (struct tree_binfo, base_binfos)
1914 + vec<tree, va_gc>::embedded_size (base_binfos));
1915
1916 record_node_allocation_statistics (TREE_BINFO, length);
1917
1918 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1919
1920 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
1921
1922 TREE_SET_CODE (t, TREE_BINFO);
1923
1924 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
1925
1926 return t;
1927 }
1928
1929 /* Create a CASE_LABEL_EXPR tree node and return it. */
1930
1931 tree
1932 build_case_label (tree low_value, tree high_value, tree label_decl)
1933 {
1934 tree t = make_node (CASE_LABEL_EXPR);
1935
1936 TREE_TYPE (t) = void_type_node;
1937 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
1938
1939 CASE_LOW (t) = low_value;
1940 CASE_HIGH (t) = high_value;
1941 CASE_LABEL (t) = label_decl;
1942 CASE_CHAIN (t) = NULL_TREE;
1943
1944 return t;
1945 }
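/* Illustrative sketch, not part of the original source: a 'case 1:' label
   for LABEL_DECL_NODE, assumed to be a LABEL_DECL built elsewhere; a
   'default:' label would pass NULL_TREE for both LOW_VALUE and HIGH_VALUE.

     tree one = build_int_cst (integer_type_node, 1);
     tree cl = build_case_label (one, NULL_TREE, label_decl_node);  */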
1946
1947 /* Build a newly constructed INTEGER_CST node of length LEN. */
1948
1949 tree
1950 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
1951 {
1952 tree t;
1953 int length = (ext_len - 1) * sizeof (tree) + sizeof (struct tree_int_cst);
1954
1955 gcc_assert (len);
1956 record_node_allocation_statistics (INTEGER_CST, length);
1957
1958 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1959
1960 TREE_SET_CODE (t, INTEGER_CST);
1961 TREE_INT_CST_NUNITS (t) = len;
1962 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
1963
1964 TREE_CONSTANT (t) = 1;
1965
1966 return t;
1967 }
1968
1969 /* Build a newly constructed TREE_VEC node of length LEN. */
1970
1971 tree
1972 make_tree_vec_stat (int len MEM_STAT_DECL)
1973 {
1974 tree t;
1975 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1976
1977 record_node_allocation_statistics (TREE_VEC, length);
1978
1979 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1980
1981 TREE_SET_CODE (t, TREE_VEC);
1982 TREE_VEC_LENGTH (t) = len;
1983
1984 return t;
1985 }
1986
1987 /* Grow a TREE_VEC node to new length LEN. */
1988
1989 tree
1990 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
1991 {
1992 gcc_assert (TREE_CODE (v) == TREE_VEC);
1993
1994 int oldlen = TREE_VEC_LENGTH (v);
1995 gcc_assert (len > oldlen);
1996
1997 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
1998 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1999
2000 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2001
2002 v = (tree) ggc_realloc_stat (v, length PASS_MEM_STAT);
2003
2004 TREE_VEC_LENGTH (v) = len;
2005
2006 return v;
2007 }
2008 \f
2009 /* Return 1 if EXPR is the integer constant zero or a complex or vector
2010 constant all of whose elements are zero. */
2011
2012 int
2013 integer_zerop (const_tree expr)
2014 {
2015 STRIP_NOPS (expr);
2016
2017 switch (TREE_CODE (expr))
2018 {
2019 case INTEGER_CST:
2020 return wi::eq_p (expr, 0);
2021 case COMPLEX_CST:
2022 return (integer_zerop (TREE_REALPART (expr))
2023 && integer_zerop (TREE_IMAGPART (expr)));
2024 case VECTOR_CST:
2025 {
2026 unsigned i;
2027 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2028 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2029 return false;
2030 return true;
2031 }
2032 default:
2033 return false;
2034 }
2035 }
2036
2037 /* Return 1 if EXPR is the integer constant one, the corresponding
2038 complex constant, or a vector constant all of whose elements are one. */
2039
2040 int
2041 integer_onep (const_tree expr)
2042 {
2043 STRIP_NOPS (expr);
2044
2045 switch (TREE_CODE (expr))
2046 {
2047 case INTEGER_CST:
2048 return wi::eq_p (wi::to_widest (expr), 1);
2049 case COMPLEX_CST:
2050 return (integer_onep (TREE_REALPART (expr))
2051 && integer_zerop (TREE_IMAGPART (expr)));
2052 case VECTOR_CST:
2053 {
2054 unsigned i;
2055 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2056 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2057 return false;
2058 return true;
2059 }
2060 default:
2061 return false;
2062 }
2063 }
2064
2065 /* Return 1 if EXPR is an integer constant all of whose bits are set within
2066 its precision, or a complex or vector whose subparts are such integers. */
2067
2068 int
2069 integer_all_onesp (const_tree expr)
2070 {
2071 STRIP_NOPS (expr);
2072
2073 if (TREE_CODE (expr) == COMPLEX_CST
2074 && integer_all_onesp (TREE_REALPART (expr))
2075 && integer_all_onesp (TREE_IMAGPART (expr)))
2076 return 1;
2077
2078 else if (TREE_CODE (expr) == VECTOR_CST)
2079 {
2080 unsigned i;
2081 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2082 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2083 return 0;
2084 return 1;
2085 }
2086
2087 else if (TREE_CODE (expr) != INTEGER_CST)
2088 return 0;
2089
2090 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2091 }
2092
2093 /* Return 1 if EXPR is the integer constant minus one. */
2094
2095 int
2096 integer_minus_onep (const_tree expr)
2097 {
2098 STRIP_NOPS (expr);
2099
2100 if (TREE_CODE (expr) == COMPLEX_CST)
2101 return (integer_all_onesp (TREE_REALPART (expr))
2102 && integer_zerop (TREE_IMAGPART (expr)));
2103 else
2104 return integer_all_onesp (expr);
2105 }
2106
2107 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2108 one bit on). */
2109
2110 int
2111 integer_pow2p (const_tree expr)
2112 {
2113 STRIP_NOPS (expr);
2114
2115 if (TREE_CODE (expr) == COMPLEX_CST
2116 && integer_pow2p (TREE_REALPART (expr))
2117 && integer_zerop (TREE_IMAGPART (expr)))
2118 return 1;
2119
2120 if (TREE_CODE (expr) != INTEGER_CST)
2121 return 0;
2122
2123 return wi::popcount (expr) == 1;
2124 }
2125
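/* A small illustration (not from the original sources) of the constant
   predicates above:

     tree eight = build_int_cst (integer_type_node, 8);

   integer_zerop (eight) and integer_onep (eight) are 0, while
   integer_pow2p (eight) is 1 and tree_log2 (eight) returns 3.  The
   predicates use STRIP_NOPS, so a mode-preserving cast wrapped around
   EIGHT gives the same answers.  */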
2126 /* Return 1 if EXPR is an integer constant other than zero or a
2127 complex constant other than zero. */
2128
2129 int
2130 integer_nonzerop (const_tree expr)
2131 {
2132 STRIP_NOPS (expr);
2133
2134 return ((TREE_CODE (expr) == INTEGER_CST
2135 && !wi::eq_p (expr, 0))
2136 || (TREE_CODE (expr) == COMPLEX_CST
2137 && (integer_nonzerop (TREE_REALPART (expr))
2138 || integer_nonzerop (TREE_IMAGPART (expr)))));
2139 }
2140
2141 /* Return 1 if EXPR is the fixed-point constant zero. */
2142
2143 int
2144 fixed_zerop (const_tree expr)
2145 {
2146 return (TREE_CODE (expr) == FIXED_CST
2147 && TREE_FIXED_CST (expr).data.is_zero ());
2148 }
2149
2150 /* Return the power of two represented by a tree node known to be a
2151 power of two. */
2152
2153 int
2154 tree_log2 (const_tree expr)
2155 {
2156 STRIP_NOPS (expr);
2157
2158 if (TREE_CODE (expr) == COMPLEX_CST)
2159 return tree_log2 (TREE_REALPART (expr));
2160
2161 return wi::exact_log2 (expr);
2162 }
2163
2164 /* Similar, but return the largest integer Y such that 2 ** Y is less
2165 than or equal to EXPR. */
2166
2167 int
2168 tree_floor_log2 (const_tree expr)
2169 {
2170 STRIP_NOPS (expr);
2171
2172 if (TREE_CODE (expr) == COMPLEX_CST)
2173 return tree_log2 (TREE_REALPART (expr));
2174
2175 return wi::floor_log2 (expr);
2176 }
2177
2178 /* Return number of known trailing zero bits in EXPR, or, if the value of
2179 EXPR is known to be zero, the precision of its type. */
2180
2181 unsigned int
2182 tree_ctz (const_tree expr)
2183 {
2184 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2185 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2186 return 0;
2187
2188 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2189 switch (TREE_CODE (expr))
2190 {
2191 case INTEGER_CST:
2192 ret1 = wi::ctz (expr);
2193 return MIN (ret1, prec);
2194 case SSA_NAME:
2195 ret1 = wi::ctz (get_nonzero_bits (expr));
2196 return MIN (ret1, prec);
2197 case PLUS_EXPR:
2198 case MINUS_EXPR:
2199 case BIT_IOR_EXPR:
2200 case BIT_XOR_EXPR:
2201 case MIN_EXPR:
2202 case MAX_EXPR:
2203 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2204 if (ret1 == 0)
2205 return ret1;
2206 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2207 return MIN (ret1, ret2);
2208 case POINTER_PLUS_EXPR:
2209 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2210 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2211 /* Second operand is sizetype, which could be in theory
2212 wider than pointer's precision. Make sure we never
2213 return more than prec. */
2214 ret2 = MIN (ret2, prec);
2215 return MIN (ret1, ret2);
2216 case BIT_AND_EXPR:
2217 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2218 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2219 return MAX (ret1, ret2);
2220 case MULT_EXPR:
2221 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2222 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2223 return MIN (ret1 + ret2, prec);
2224 case LSHIFT_EXPR:
2225 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2226 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2227 && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
2228 < (unsigned HOST_WIDE_INT) prec))
2229 {
2230 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2231 return MIN (ret1 + ret2, prec);
2232 }
2233 return ret1;
2234 case RSHIFT_EXPR:
2235 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2236 && ((unsigned HOST_WIDE_INT) tree_to_uhwi (TREE_OPERAND (expr, 1))
2237 < (unsigned HOST_WIDE_INT) prec))
2238 {
2239 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2240 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2241 if (ret1 > ret2)
2242 return ret1 - ret2;
2243 }
2244 return 0;
2245 case TRUNC_DIV_EXPR:
2246 case CEIL_DIV_EXPR:
2247 case FLOOR_DIV_EXPR:
2248 case ROUND_DIV_EXPR:
2249 case EXACT_DIV_EXPR:
2250 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2251 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2252 {
2253 int l = tree_log2 (TREE_OPERAND (expr, 1));
2254 if (l >= 0)
2255 {
2256 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2257 ret2 = l;
2258 if (ret1 > ret2)
2259 return ret1 - ret2;
2260 }
2261 }
2262 return 0;
2263 CASE_CONVERT:
2264 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2265 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2266 ret1 = prec;
2267 return MIN (ret1, prec);
2268 case SAVE_EXPR:
2269 return tree_ctz (TREE_OPERAND (expr, 0));
2270 case COND_EXPR:
2271 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2272 if (ret1 == 0)
2273 return 0;
2274 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2275 return MIN (ret1, ret2);
2276 case COMPOUND_EXPR:
2277 return tree_ctz (TREE_OPERAND (expr, 1));
2278 case ADDR_EXPR:
2279 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2280 if (ret1 > BITS_PER_UNIT)
2281 {
2282 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2283 return MIN (ret1, prec);
2284 }
2285 return 0;
2286 default:
2287 return 0;
2288 }
2289 }
2290
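/* Worked example (illustrative): for an expression N * 8, where N is an
   SSA_NAME with no nonzero-bits information recorded, the MULT_EXPR case
   above yields tree_ctz (N) + tree_ctz (8) = 0 + 3 = 3, i.e. the product
   is known to be a multiple of 8.  For (N * 8) >> 4 the RSHIFT_EXPR case
   then returns 0, because trailing zeros only survive when ret1 > ret2.  */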
2291 /* Return 1 if EXPR is the real constant zero in real, complex or vector form.
2292 Trailing zeroes matter for decimal float constants, so don't return 1 for them. */
2293
2294 int
2295 real_zerop (const_tree expr)
2296 {
2297 STRIP_NOPS (expr);
2298
2299 switch (TREE_CODE (expr))
2300 {
2301 case REAL_CST:
2302 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2303 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2304 case COMPLEX_CST:
2305 return real_zerop (TREE_REALPART (expr))
2306 && real_zerop (TREE_IMAGPART (expr));
2307 case VECTOR_CST:
2308 {
2309 unsigned i;
2310 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2311 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2312 return false;
2313 return true;
2314 }
2315 default:
2316 return false;
2317 }
2318 }
2319
2320 /* Return 1 if EXPR is the real constant one in real, complex or vector form.
2321 Trailing zeroes matter for decimal float constants, so don't return
2322 1 for them. */
2323
2324 int
2325 real_onep (const_tree expr)
2326 {
2327 STRIP_NOPS (expr);
2328
2329 switch (TREE_CODE (expr))
2330 {
2331 case REAL_CST:
2332 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2333 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2334 case COMPLEX_CST:
2335 return real_onep (TREE_REALPART (expr))
2336 && real_zerop (TREE_IMAGPART (expr));
2337 case VECTOR_CST:
2338 {
2339 unsigned i;
2340 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2341 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2342 return false;
2343 return true;
2344 }
2345 default:
2346 return false;
2347 }
2348 }
2349
2350 /* Return 1 if EXPR is the real constant minus one in real, complex or vector
2351 form. Trailing zeroes matter for decimal float constants, so don't return 1 for them. */
2352
2353 int
2354 real_minus_onep (const_tree expr)
2355 {
2356 STRIP_NOPS (expr);
2357
2358 switch (TREE_CODE (expr))
2359 {
2360 case REAL_CST:
2361 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2362 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2363 case COMPLEX_CST:
2364 return real_minus_onep (TREE_REALPART (expr))
2365 && real_zerop (TREE_IMAGPART (expr));
2366 case VECTOR_CST:
2367 {
2368 unsigned i;
2369 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2370 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2371 return false;
2372 return true;
2373 }
2374 default:
2375 return false;
2376 }
2377 }
2378
2379 /* Nonzero if EXP is a constant or a cast of a constant. */
2380
2381 int
2382 really_constant_p (const_tree exp)
2383 {
2384 /* This is not quite the same as STRIP_NOPS. It does more. */
2385 while (CONVERT_EXPR_P (exp)
2386 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2387 exp = TREE_OPERAND (exp, 0);
2388 return TREE_CONSTANT (exp);
2389 }
2390 \f
2391 /* Return first list element whose TREE_VALUE is ELEM.
2392 Return 0 if ELEM is not in LIST. */
2393
2394 tree
2395 value_member (tree elem, tree list)
2396 {
2397 while (list)
2398 {
2399 if (elem == TREE_VALUE (list))
2400 return list;
2401 list = TREE_CHAIN (list);
2402 }
2403 return NULL_TREE;
2404 }
2405
2406 /* Return first list element whose TREE_PURPOSE is ELEM.
2407 Return 0 if ELEM is not in LIST. */
2408
2409 tree
2410 purpose_member (const_tree elem, tree list)
2411 {
2412 while (list)
2413 {
2414 if (elem == TREE_PURPOSE (list))
2415 return list;
2416 list = TREE_CHAIN (list);
2417 }
2418 return NULL_TREE;
2419 }
2420
2421 /* Return true if ELEM is in V. */
2422
2423 bool
2424 vec_member (const_tree elem, vec<tree, va_gc> *v)
2425 {
2426 unsigned ix;
2427 tree t;
2428 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2429 if (elem == t)
2430 return true;
2431 return false;
2432 }
2433
2434 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2435 NULL_TREE if there is no such element. */
2436
2437 tree
2438 chain_index (int idx, tree chain)
2439 {
2440 for (; chain && idx > 0; --idx)
2441 chain = TREE_CHAIN (chain);
2442 return chain;
2443 }
2444
2445 /* Return nonzero if ELEM is part of the chain CHAIN. */
2446
2447 int
2448 chain_member (const_tree elem, const_tree chain)
2449 {
2450 while (chain)
2451 {
2452 if (elem == chain)
2453 return 1;
2454 chain = DECL_CHAIN (chain);
2455 }
2456
2457 return 0;
2458 }
2459
2460 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2461 We expect a null pointer to mark the end of the chain.
2462 This is the Lisp primitive `length'. */
2463
2464 int
2465 list_length (const_tree t)
2466 {
2467 const_tree p = t;
2468 #ifdef ENABLE_TREE_CHECKING
2469 const_tree q = t;
2470 #endif
2471 int len = 0;
2472
2473 while (p)
2474 {
2475 p = TREE_CHAIN (p);
2476 #ifdef ENABLE_TREE_CHECKING
2477 if (len % 2)
2478 q = TREE_CHAIN (q);
2479 gcc_assert (p != q);
2480 #endif
2481 len++;
2482 }
2483
2484 return len;
2485 }
2486
2487 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2488 UNION_TYPE TYPE, or NULL_TREE if none. */
2489
2490 tree
2491 first_field (const_tree type)
2492 {
2493 tree t = TYPE_FIELDS (type);
2494 while (t && TREE_CODE (t) != FIELD_DECL)
2495 t = TREE_CHAIN (t);
2496 return t;
2497 }
2498
2499 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2500 by modifying the last node in chain 1 to point to chain 2.
2501 This is the Lisp primitive `nconc'. */
2502
2503 tree
2504 chainon (tree op1, tree op2)
2505 {
2506 tree t1;
2507
2508 if (!op1)
2509 return op2;
2510 if (!op2)
2511 return op1;
2512
2513 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2514 continue;
2515 TREE_CHAIN (t1) = op2;
2516
2517 #ifdef ENABLE_TREE_CHECKING
2518 {
2519 tree t2;
2520 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2521 gcc_assert (t2 != t1);
2522 }
2523 #endif
2524
2525 return op1;
2526 }
2527
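/* Example (illustrative): concatenating two one-element attribute lists,

     tree a = tree_cons (get_identifier ("used"), NULL_TREE, NULL_TREE);
     tree b = tree_cons (get_identifier ("cold"), NULL_TREE, NULL_TREE);
     tree all = chainon (a, b);

   leaves A as the head of the combined chain with B reachable through
   TREE_CHAIN (a); chainon simply returns OP2 when OP1 is empty.  */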
2528 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2529
2530 tree
2531 tree_last (tree chain)
2532 {
2533 tree next;
2534 if (chain)
2535 while ((next = TREE_CHAIN (chain)))
2536 chain = next;
2537 return chain;
2538 }
2539
2540 /* Reverse the order of elements in the chain T,
2541 and return the new head of the chain (old last element). */
2542
2543 tree
2544 nreverse (tree t)
2545 {
2546 tree prev = 0, decl, next;
2547 for (decl = t; decl; decl = next)
2548 {
2549 /* We shouldn't be using this function to reverse BLOCK chains; we
2550 have blocks_nreverse for that. */
2551 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2552 next = TREE_CHAIN (decl);
2553 TREE_CHAIN (decl) = prev;
2554 prev = decl;
2555 }
2556 return prev;
2557 }
2558 \f
2559 /* Return a newly created TREE_LIST node whose
2560 purpose and value fields are PARM and VALUE. */
2561
2562 tree
2563 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2564 {
2565 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2566 TREE_PURPOSE (t) = parm;
2567 TREE_VALUE (t) = value;
2568 return t;
2569 }
2570
2571 /* Build a chain of TREE_LIST nodes from a vector. */
2572
2573 tree
2574 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2575 {
2576 tree ret = NULL_TREE;
2577 tree *pp = &ret;
2578 unsigned int i;
2579 tree t;
2580 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2581 {
2582 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2583 pp = &TREE_CHAIN (*pp);
2584 }
2585 return ret;
2586 }
2587
2588 /* Return a newly created TREE_LIST node whose
2589 purpose and value fields are PURPOSE and VALUE
2590 and whose TREE_CHAIN is CHAIN. */
2591
2592 tree
2593 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2594 {
2595 tree node;
2596
2597 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2598 memset (node, 0, sizeof (struct tree_common));
2599
2600 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2601
2602 TREE_SET_CODE (node, TREE_LIST);
2603 TREE_CHAIN (node) = chain;
2604 TREE_PURPOSE (node) = purpose;
2605 TREE_VALUE (node) = value;
2606 return node;
2607 }
2608
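/* Illustrative sketch: tree_cons prepends, so building the list
   (1, 2, 3) front to back reads

     tree l = NULL_TREE;
     l = tree_cons (NULL_TREE, build_int_cst (integer_type_node, 3), l);
     l = tree_cons (NULL_TREE, build_int_cst (integer_type_node, 2), l);
     l = tree_cons (NULL_TREE, build_int_cst (integer_type_node, 1), l);

   after which TREE_VALUE (l) is 1 and list_length (l) is 3.  This is
   merely an example of the calling convention, not code taken from GCC.  */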
2609 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2610 trees. */
2611
2612 vec<tree, va_gc> *
2613 ctor_to_vec (tree ctor)
2614 {
2615 vec<tree, va_gc> *vec;
2616 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2617 unsigned int ix;
2618 tree val;
2619
2620 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2621 vec->quick_push (val);
2622
2623 return vec;
2624 }
2625 \f
2626 /* Return the size nominally occupied by an object of type TYPE
2627 when it resides in memory. The value is measured in units of bytes,
2628 and its data type is that normally used for type sizes
2629 (which is the first type created by make_signed_type or
2630 make_unsigned_type). */
2631
2632 tree
2633 size_in_bytes (const_tree type)
2634 {
2635 tree t;
2636
2637 if (type == error_mark_node)
2638 return integer_zero_node;
2639
2640 type = TYPE_MAIN_VARIANT (type);
2641 t = TYPE_SIZE_UNIT (type);
2642
2643 if (t == 0)
2644 {
2645 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2646 return size_zero_node;
2647 }
2648
2649 return t;
2650 }
2651
2652 /* Return the size of TYPE (in bytes) as a wide integer
2653 or return -1 if the size can vary or is larger than an integer. */
2654
2655 HOST_WIDE_INT
2656 int_size_in_bytes (const_tree type)
2657 {
2658 tree t;
2659
2660 if (type == error_mark_node)
2661 return 0;
2662
2663 type = TYPE_MAIN_VARIANT (type);
2664 t = TYPE_SIZE_UNIT (type);
2665
2666 if (t && cst_fits_uhwi_p (t))
2667 return tree_to_hwi (t);
2668 else
2669 return -1;
2670 }
2671
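/* For instance (illustrative; the value assumes a target where int is
   32 bits wide), int_size_in_bytes (integer_type_node) is 4.  For a C99
   variable-length array type TYPE_SIZE_UNIT is not a compile-time
   constant, so int_size_in_bytes returns -1 while size_in_bytes still
   hands back the variable size expression.  */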
2672 /* Return the maximum size of TYPE (in bytes) as a wide integer
2673 or return -1 if the size can vary or is larger than an integer. */
2674
2675 HOST_WIDE_INT
2676 max_int_size_in_bytes (const_tree type)
2677 {
2678 HOST_WIDE_INT size = -1;
2679 tree size_tree;
2680
2681 /* If this is an array type, check for a possible MAX_SIZE attached. */
2682
2683 if (TREE_CODE (type) == ARRAY_TYPE)
2684 {
2685 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2686
2687 if (size_tree && tree_fits_uhwi_p (size_tree))
2688 size = tree_to_uhwi (size_tree);
2689 }
2690
2691 /* If we still haven't been able to get a size, see if the language
2692 can compute a maximum size. */
2693
2694 if (size == -1)
2695 {
2696 size_tree = lang_hooks.types.max_size (type);
2697
2698 if (size_tree && tree_fits_uhwi_p (size_tree))
2699 size = tree_to_uhwi (size_tree);
2700 }
2701
2702 return size;
2703 }
2704 \f
2705 /* Return the bit position of FIELD, in bits from the start of the record.
2706 This is a tree of type bitsizetype. */
2707
2708 tree
2709 bit_position (const_tree field)
2710 {
2711 return bit_from_pos (DECL_FIELD_OFFSET (field),
2712 DECL_FIELD_BIT_OFFSET (field));
2713 }
2714
2715 /* Likewise, but return as an integer. It must be representable in
2716 that way (since it could be a signed value, we don't have the
2717 option of returning -1 like int_size_in_bytes can). */
2718
2719 HOST_WIDE_INT
2720 int_bit_position (const_tree field)
2721 {
2722 return tree_to_shwi (bit_position (field));
2723 }
2724 \f
2725 /* Return the byte position of FIELD, in bytes from the start of the record.
2726 This is a tree of type sizetype. */
2727
2728 tree
2729 byte_position (const_tree field)
2730 {
2731 return byte_from_pos (DECL_FIELD_OFFSET (field),
2732 DECL_FIELD_BIT_OFFSET (field));
2733 }
2734
2735 /* Likewise, but return as an integer. It must be representable in
2736 that way (since it could be a signed value, we don't have the
2737 option of returning -1 like int_size_in_bytes can). */
2738
2739 HOST_WIDE_INT
2740 int_byte_position (const_tree field)
2741 {
2742 return tree_to_shwi (byte_position (field));
2743 }
2744 \f
2745 /* Return the strictest alignment, in bits, that T is known to have. */
2746
2747 unsigned int
2748 expr_align (const_tree t)
2749 {
2750 unsigned int align0, align1;
2751
2752 switch (TREE_CODE (t))
2753 {
2754 CASE_CONVERT: case NON_LVALUE_EXPR:
2755 /* If we have conversions, we know that the alignment of the
2756 object must meet each of the alignments of the types. */
2757 align0 = expr_align (TREE_OPERAND (t, 0));
2758 align1 = TYPE_ALIGN (TREE_TYPE (t));
2759 return MAX (align0, align1);
2760
2761 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2762 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2763 case CLEANUP_POINT_EXPR:
2764 /* These don't change the alignment of an object. */
2765 return expr_align (TREE_OPERAND (t, 0));
2766
2767 case COND_EXPR:
2768 /* The best we can do is say that the alignment is the least aligned
2769 of the two arms. */
2770 align0 = expr_align (TREE_OPERAND (t, 1));
2771 align1 = expr_align (TREE_OPERAND (t, 2));
2772 return MIN (align0, align1);
2773
2774 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2775 meaningfully, it's always 1. */
2776 case LABEL_DECL: case CONST_DECL:
2777 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2778 case FUNCTION_DECL:
2779 gcc_assert (DECL_ALIGN (t) != 0);
2780 return DECL_ALIGN (t);
2781
2782 default:
2783 break;
2784 }
2785
2786 /* Otherwise take the alignment from that of the type. */
2787 return TYPE_ALIGN (TREE_TYPE (t));
2788 }
2789 \f
2790 /* Return, as a tree node, the number of elements for TYPE (which is an
2791 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2792
2793 tree
2794 array_type_nelts (const_tree type)
2795 {
2796 tree index_type, min, max;
2797
2798 /* If they did it with unspecified bounds, then we should have already
2799 given an error about it before we got here. */
2800 if (! TYPE_DOMAIN (type))
2801 return error_mark_node;
2802
2803 index_type = TYPE_DOMAIN (type);
2804 min = TYPE_MIN_VALUE (index_type);
2805 max = TYPE_MAX_VALUE (index_type);
2806
2807 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2808 if (!max)
2809 return error_mark_node;
2810
2811 return (integer_zerop (min)
2812 ? max
2813 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2814 }
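
/* For example (illustrative): for the C type int[10] the domain is
   [0, 9], so array_type_nelts returns the INTEGER_CST 9; for an array
   with bounds [1, N] it returns the folded tree N - 1.  Callers add one
   back if the element count itself is wanted.  */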
2815 \f
2816 /* If arg is static -- a reference to an object in static storage -- then
2817 return the object. This is not the same as the C meaning of `static'.
2818 If arg isn't static, return NULL. */
2819
2820 tree
2821 staticp (tree arg)
2822 {
2823 switch (TREE_CODE (arg))
2824 {
2825 case FUNCTION_DECL:
2826 /* Nested functions are static, even though taking their address will
2827 involve a trampoline as we unnest the nested function and create
2828 the trampoline on the tree level. */
2829 return arg;
2830
2831 case VAR_DECL:
2832 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2833 && ! DECL_THREAD_LOCAL_P (arg)
2834 && ! DECL_DLLIMPORT_P (arg)
2835 ? arg : NULL);
2836
2837 case CONST_DECL:
2838 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2839 ? arg : NULL);
2840
2841 case CONSTRUCTOR:
2842 return TREE_STATIC (arg) ? arg : NULL;
2843
2844 case LABEL_DECL:
2845 case STRING_CST:
2846 return arg;
2847
2848 case COMPONENT_REF:
2849 /* If the thing being referenced is not a field, then it is
2850 something language specific. */
2851 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2852
2853 /* If we are referencing a bitfield, we can't evaluate an
2854 ADDR_EXPR at compile time and so it isn't a constant. */
2855 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2856 return NULL;
2857
2858 return staticp (TREE_OPERAND (arg, 0));
2859
2860 case BIT_FIELD_REF:
2861 return NULL;
2862
2863 case INDIRECT_REF:
2864 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2865
2866 case ARRAY_REF:
2867 case ARRAY_RANGE_REF:
2868 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2869 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2870 return staticp (TREE_OPERAND (arg, 0));
2871 else
2872 return NULL;
2873
2874 case COMPOUND_LITERAL_EXPR:
2875 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2876
2877 default:
2878 return NULL;
2879 }
2880 }
2881
2882 \f
2883
2884
2885 /* Return whether OP is a DECL whose address is function-invariant. */
2886
2887 bool
2888 decl_address_invariant_p (const_tree op)
2889 {
2890 /* The conditions below are slightly less strict than the one in
2891 staticp. */
2892
2893 switch (TREE_CODE (op))
2894 {
2895 case PARM_DECL:
2896 case RESULT_DECL:
2897 case LABEL_DECL:
2898 case FUNCTION_DECL:
2899 return true;
2900
2901 case VAR_DECL:
2902 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2903 || DECL_THREAD_LOCAL_P (op)
2904 || DECL_CONTEXT (op) == current_function_decl
2905 || decl_function_context (op) == current_function_decl)
2906 return true;
2907 break;
2908
2909 case CONST_DECL:
2910 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2911 || decl_function_context (op) == current_function_decl)
2912 return true;
2913 break;
2914
2915 default:
2916 break;
2917 }
2918
2919 return false;
2920 }
2921
2922 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
2923
2924 bool
2925 decl_address_ip_invariant_p (const_tree op)
2926 {
2927 /* The conditions below are slightly less strict than the one in
2928 staticp. */
2929
2930 switch (TREE_CODE (op))
2931 {
2932 case LABEL_DECL:
2933 case FUNCTION_DECL:
2934 case STRING_CST:
2935 return true;
2936
2937 case VAR_DECL:
2938 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
2939 && !DECL_DLLIMPORT_P (op))
2940 || DECL_THREAD_LOCAL_P (op))
2941 return true;
2942 break;
2943
2944 case CONST_DECL:
2945 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
2946 return true;
2947 break;
2948
2949 default:
2950 break;
2951 }
2952
2953 return false;
2954 }
2955
2956
2957 /* Return true if T is function-invariant (internal function, does
2958 not handle arithmetic; that's handled in skip_simple_arithmetic and
2959 tree_invariant_p). */
2960
2961 static bool tree_invariant_p (tree t);
2962
2963 static bool
2964 tree_invariant_p_1 (tree t)
2965 {
2966 tree op;
2967
2968 if (TREE_CONSTANT (t)
2969 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
2970 return true;
2971
2972 switch (TREE_CODE (t))
2973 {
2974 case SAVE_EXPR:
2975 return true;
2976
2977 case ADDR_EXPR:
2978 op = TREE_OPERAND (t, 0);
2979 while (handled_component_p (op))
2980 {
2981 switch (TREE_CODE (op))
2982 {
2983 case ARRAY_REF:
2984 case ARRAY_RANGE_REF:
2985 if (!tree_invariant_p (TREE_OPERAND (op, 1))
2986 || TREE_OPERAND (op, 2) != NULL_TREE
2987 || TREE_OPERAND (op, 3) != NULL_TREE)
2988 return false;
2989 break;
2990
2991 case COMPONENT_REF:
2992 if (TREE_OPERAND (op, 2) != NULL_TREE)
2993 return false;
2994 break;
2995
2996 default:;
2997 }
2998 op = TREE_OPERAND (op, 0);
2999 }
3000
3001 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3002
3003 default:
3004 break;
3005 }
3006
3007 return false;
3008 }
3009
3010 /* Return true if T is function-invariant. */
3011
3012 static bool
3013 tree_invariant_p (tree t)
3014 {
3015 tree inner = skip_simple_arithmetic (t);
3016 return tree_invariant_p_1 (inner);
3017 }
3018
3019 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3020 Do this to any expression which may be used in more than one place,
3021 but must be evaluated only once.
3022
3023 Normally, expand_expr would reevaluate the expression each time.
3024 Calling save_expr produces something that is evaluated and recorded
3025 the first time expand_expr is called on it. Subsequent calls to
3026 expand_expr just reuse the recorded value.
3027
3028 The call to expand_expr that generates code that actually computes
3029 the value is the first call *at compile time*. Subsequent calls
3030 *at compile time* generate code to use the saved value.
3031 This produces the correct result provided that *at run time* control
3032 always flows through the insns made by the first expand_expr
3033 before reaching the other places where the save_expr was evaluated.
3034 You, the caller of save_expr, must make sure this is so.
3035
3036 Constants, and certain read-only nodes, are returned with no
3037 SAVE_EXPR because that is safe. Expressions containing placeholders
3038 are not touched; see tree.def for an explanation of what these
3039 are used for. */
3040
3041 tree
3042 save_expr (tree expr)
3043 {
3044 tree t = fold (expr);
3045 tree inner;
3046
3047 /* If the tree evaluates to a constant, then we don't want to hide that
3048 fact (i.e. this allows further folding, and direct checks for constants).
3049 However, a read-only object that has side effects cannot be bypassed.
3050 Since it is no problem to reevaluate literals, we just return the
3051 literal node. */
3052 inner = skip_simple_arithmetic (t);
3053 if (TREE_CODE (inner) == ERROR_MARK)
3054 return inner;
3055
3056 if (tree_invariant_p_1 (inner))
3057 return t;
3058
3059 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3060 it means that the size or offset of some field of an object depends on
3061 the value within another field.
3062
3063 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3064 and some variable since it would then need to be both evaluated once and
3065 evaluated more than once. Front-ends must ensure this case cannot
3066 happen by surrounding any such subexpressions in their own SAVE_EXPR
3067 and forcing evaluation at the proper time. */
3068 if (contains_placeholder_p (inner))
3069 return t;
3070
3071 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3072 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3073
3074 /* This expression might be placed ahead of a jump to ensure that the
3075 value was computed on both sides of the jump. So make sure it isn't
3076 eliminated as dead. */
3077 TREE_SIDE_EFFECTS (t) = 1;
3078 return t;
3079 }
3080
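/* Minimal usage sketch (illustrative; A, B and TYPE stand for operand
   trees and a result type supplied by the caller): when open-coding
   something like MIN (A, B) as a COND_EXPR, both operands appear twice,
   so one would typically write

     a = save_expr (a);
     b = save_expr (b);
     t = fold_build3 (COND_EXPR, type,
                      fold_build2 (LT_EXPR, boolean_type_node, a, b),
                      a, b);

   so that each operand is evaluated only once at run time.  */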
3081 /* Look inside EXPR into any simple arithmetic operations. Return the
3082 outermost non-arithmetic or non-invariant node. */
3083
3084 tree
3085 skip_simple_arithmetic (tree expr)
3086 {
3087 /* We don't care about whether this can be used as an lvalue in this
3088 context. */
3089 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3090 expr = TREE_OPERAND (expr, 0);
3091
3092 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3093 a constant, it will be more efficient to not make another SAVE_EXPR since
3094 it will allow better simplification and GCSE will be able to merge the
3095 computations if they actually occur. */
3096 while (true)
3097 {
3098 if (UNARY_CLASS_P (expr))
3099 expr = TREE_OPERAND (expr, 0);
3100 else if (BINARY_CLASS_P (expr))
3101 {
3102 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3103 expr = TREE_OPERAND (expr, 0);
3104 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3105 expr = TREE_OPERAND (expr, 1);
3106 else
3107 break;
3108 }
3109 else
3110 break;
3111 }
3112
3113 return expr;
3114 }
3115
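/* Worked example (illustrative): if EXPR is (SAVE_EXPR <a> + 4) * 2, the
   loop above strips the MULT_EXPR (2 is invariant) and then the PLUS_EXPR
   (4 is invariant) and returns SAVE_EXPR <a>; save_expr then sees that the
   innermost node is already invariant and does not wrap a second SAVE_EXPR
   around the whole expression.  */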
3116 /* Look inside EXPR into simple arithmetic operations involving constants.
3117 Return the outermost non-arithmetic or non-constant node. */
3118
3119 tree
3120 skip_simple_constant_arithmetic (tree expr)
3121 {
3122 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3123 expr = TREE_OPERAND (expr, 0);
3124
3125 while (true)
3126 {
3127 if (UNARY_CLASS_P (expr))
3128 expr = TREE_OPERAND (expr, 0);
3129 else if (BINARY_CLASS_P (expr))
3130 {
3131 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3132 expr = TREE_OPERAND (expr, 0);
3133 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3134 expr = TREE_OPERAND (expr, 1);
3135 else
3136 break;
3137 }
3138 else
3139 break;
3140 }
3141
3142 return expr;
3143 }
3144
3145 /* Return which tree structure is used by T. */
3146
3147 enum tree_node_structure_enum
3148 tree_node_structure (const_tree t)
3149 {
3150 const enum tree_code code = TREE_CODE (t);
3151 return tree_node_structure_for_code (code);
3152 }
3153
3154 /* Set various status flags when building a CALL_EXPR object T. */
3155
3156 static void
3157 process_call_operands (tree t)
3158 {
3159 bool side_effects = TREE_SIDE_EFFECTS (t);
3160 bool read_only = false;
3161 int i = call_expr_flags (t);
3162
3163 /* Calls have side-effects, except those to const or pure functions. */
3164 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3165 side_effects = true;
3166 /* Propagate TREE_READONLY of arguments for const functions. */
3167 if (i & ECF_CONST)
3168 read_only = true;
3169
3170 if (!side_effects || read_only)
3171 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3172 {
3173 tree op = TREE_OPERAND (t, i);
3174 if (op && TREE_SIDE_EFFECTS (op))
3175 side_effects = true;
3176 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3177 read_only = false;
3178 }
3179
3180 TREE_SIDE_EFFECTS (t) = side_effects;
3181 TREE_READONLY (t) = read_only;
3182 }
3183 \f
3184 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3185 size or offset that depends on a field within a record. */
3186
3187 bool
3188 contains_placeholder_p (const_tree exp)
3189 {
3190 enum tree_code code;
3191
3192 if (!exp)
3193 return 0;
3194
3195 code = TREE_CODE (exp);
3196 if (code == PLACEHOLDER_EXPR)
3197 return 1;
3198
3199 switch (TREE_CODE_CLASS (code))
3200 {
3201 case tcc_reference:
3202 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3203 position computations since they will be converted into a
3204 WITH_RECORD_EXPR involving the reference, which we assume
3205 here will be valid. */
3206 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3207
3208 case tcc_exceptional:
3209 if (code == TREE_LIST)
3210 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3211 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3212 break;
3213
3214 case tcc_unary:
3215 case tcc_binary:
3216 case tcc_comparison:
3217 case tcc_expression:
3218 switch (code)
3219 {
3220 case COMPOUND_EXPR:
3221 /* Ignoring the first operand isn't quite right, but works best. */
3222 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3223
3224 case COND_EXPR:
3225 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3226 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3227 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3228
3229 case SAVE_EXPR:
3230 /* The save_expr function never wraps anything containing
3231 a PLACEHOLDER_EXPR. */
3232 return 0;
3233
3234 default:
3235 break;
3236 }
3237
3238 switch (TREE_CODE_LENGTH (code))
3239 {
3240 case 1:
3241 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3242 case 2:
3243 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3244 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3245 default:
3246 return 0;
3247 }
3248
3249 case tcc_vl_exp:
3250 switch (code)
3251 {
3252 case CALL_EXPR:
3253 {
3254 const_tree arg;
3255 const_call_expr_arg_iterator iter;
3256 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3257 if (CONTAINS_PLACEHOLDER_P (arg))
3258 return 1;
3259 return 0;
3260 }
3261 default:
3262 return 0;
3263 }
3264
3265 default:
3266 return 0;
3267 }
3268 return 0;
3269 }
3270
3271 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3272 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3273 field positions. */
3274
3275 static bool
3276 type_contains_placeholder_1 (const_tree type)
3277 {
3278 /* If the size contains a placeholder or the parent type (component type in
3279 the case of arrays) involves a placeholder, this type does. */
3280 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3281 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3282 || (!POINTER_TYPE_P (type)
3283 && TREE_TYPE (type)
3284 && type_contains_placeholder_p (TREE_TYPE (type))))
3285 return true;
3286
3287 /* Now do type-specific checks. Note that the last part of the check above
3288 greatly limits what we have to do below. */
3289 switch (TREE_CODE (type))
3290 {
3291 case VOID_TYPE:
3292 case POINTER_BOUNDS_TYPE:
3293 case COMPLEX_TYPE:
3294 case ENUMERAL_TYPE:
3295 case BOOLEAN_TYPE:
3296 case POINTER_TYPE:
3297 case OFFSET_TYPE:
3298 case REFERENCE_TYPE:
3299 case METHOD_TYPE:
3300 case FUNCTION_TYPE:
3301 case VECTOR_TYPE:
3302 case NULLPTR_TYPE:
3303 return false;
3304
3305 case INTEGER_TYPE:
3306 case REAL_TYPE:
3307 case FIXED_POINT_TYPE:
3308 /* Here we just check the bounds. */
3309 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3310 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3311
3312 case ARRAY_TYPE:
3313 /* We have already checked the component type above, so just check the
3314 domain type. */
3315 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3316
3317 case RECORD_TYPE:
3318 case UNION_TYPE:
3319 case QUAL_UNION_TYPE:
3320 {
3321 tree field;
3322
3323 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3324 if (TREE_CODE (field) == FIELD_DECL
3325 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3326 || (TREE_CODE (type) == QUAL_UNION_TYPE
3327 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3328 || type_contains_placeholder_p (TREE_TYPE (field))))
3329 return true;
3330
3331 return false;
3332 }
3333
3334 default:
3335 gcc_unreachable ();
3336 }
3337 }
3338
3339 /* Wrapper around above function used to cache its result. */
3340
3341 bool
3342 type_contains_placeholder_p (tree type)
3343 {
3344 bool result;
3345
3346 /* If the contains_placeholder_bits field has been initialized,
3347 then we know the answer. */
3348 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3349 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3350
3351 /* Indicate that we've seen this type node, and the answer is false.
3352 This is what we want to return if we run into recursion via fields. */
3353 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3354
3355 /* Compute the real value. */
3356 result = type_contains_placeholder_1 (type);
3357
3358 /* Store the real value. */
3359 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3360
3361 return result;
3362 }
3363 \f
3364 /* Push tree EXP onto vector QUEUE if it is not already present. */
3365
3366 static void
3367 push_without_duplicates (tree exp, vec<tree> *queue)
3368 {
3369 unsigned int i;
3370 tree iter;
3371
3372 FOR_EACH_VEC_ELT (*queue, i, iter)
3373 if (simple_cst_equal (iter, exp) == 1)
3374 break;
3375
3376 if (!iter)
3377 queue->safe_push (exp);
3378 }
3379
3380 /* Given a tree EXP, find all occurrences of references to fields
3381 in a PLACEHOLDER_EXPR and place them in vector REFS without
3382 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3383 we assume here that EXP contains only arithmetic expressions
3384 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3385 argument list. */
3386
3387 void
3388 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3389 {
3390 enum tree_code code = TREE_CODE (exp);
3391 tree inner;
3392 int i;
3393
3394 /* We handle TREE_LIST and COMPONENT_REF separately. */
3395 if (code == TREE_LIST)
3396 {
3397 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3398 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3399 }
3400 else if (code == COMPONENT_REF)
3401 {
3402 for (inner = TREE_OPERAND (exp, 0);
3403 REFERENCE_CLASS_P (inner);
3404 inner = TREE_OPERAND (inner, 0))
3405 ;
3406
3407 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3408 push_without_duplicates (exp, refs);
3409 else
3410 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3411 }
3412 else
3413 switch (TREE_CODE_CLASS (code))
3414 {
3415 case tcc_constant:
3416 break;
3417
3418 case tcc_declaration:
3419 /* Variables allocated to static storage can stay. */
3420 if (!TREE_STATIC (exp))
3421 push_without_duplicates (exp, refs);
3422 break;
3423
3424 case tcc_expression:
3425 /* This is the pattern built in ada/make_aligning_type. */
3426 if (code == ADDR_EXPR
3427 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3428 {
3429 push_without_duplicates (exp, refs);
3430 break;
3431 }
3432
3433 /* Fall through... */
3434
3435 case tcc_exceptional:
3436 case tcc_unary:
3437 case tcc_binary:
3438 case tcc_comparison:
3439 case tcc_reference:
3440 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3441 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3442 break;
3443
3444 case tcc_vl_exp:
3445 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3446 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3447 break;
3448
3449 default:
3450 gcc_unreachable ();
3451 }
3452 }
3453
3454 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3455 return a tree with all occurrences of references to F in a
3456 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3457 CONST_DECLs. Note that we assume here that EXP contains only
3458 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3459 occurring only in their argument list. */
3460
3461 tree
3462 substitute_in_expr (tree exp, tree f, tree r)
3463 {
3464 enum tree_code code = TREE_CODE (exp);
3465 tree op0, op1, op2, op3;
3466 tree new_tree;
3467
3468 /* We handle TREE_LIST and COMPONENT_REF separately. */
3469 if (code == TREE_LIST)
3470 {
3471 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3472 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3473 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3474 return exp;
3475
3476 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3477 }
3478 else if (code == COMPONENT_REF)
3479 {
3480 tree inner;
3481
3482 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3483 and it is the right field, replace it with R. */
3484 for (inner = TREE_OPERAND (exp, 0);
3485 REFERENCE_CLASS_P (inner);
3486 inner = TREE_OPERAND (inner, 0))
3487 ;
3488
3489 /* The field. */
3490 op1 = TREE_OPERAND (exp, 1);
3491
3492 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3493 return r;
3494
3495 /* If this expression hasn't been completed yet, leave it alone. */
3496 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3497 return exp;
3498
3499 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3500 if (op0 == TREE_OPERAND (exp, 0))
3501 return exp;
3502
3503 new_tree
3504 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3505 }
3506 else
3507 switch (TREE_CODE_CLASS (code))
3508 {
3509 case tcc_constant:
3510 return exp;
3511
3512 case tcc_declaration:
3513 if (exp == f)
3514 return r;
3515 else
3516 return exp;
3517
3518 case tcc_expression:
3519 if (exp == f)
3520 return r;
3521
3522 /* Fall through... */
3523
3524 case tcc_exceptional:
3525 case tcc_unary:
3526 case tcc_binary:
3527 case tcc_comparison:
3528 case tcc_reference:
3529 switch (TREE_CODE_LENGTH (code))
3530 {
3531 case 0:
3532 return exp;
3533
3534 case 1:
3535 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3536 if (op0 == TREE_OPERAND (exp, 0))
3537 return exp;
3538
3539 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3540 break;
3541
3542 case 2:
3543 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3544 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3545
3546 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3547 return exp;
3548
3549 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3550 break;
3551
3552 case 3:
3553 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3554 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3555 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3556
3557 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3558 && op2 == TREE_OPERAND (exp, 2))
3559 return exp;
3560
3561 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3562 break;
3563
3564 case 4:
3565 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3566 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3567 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3568 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3569
3570 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3571 && op2 == TREE_OPERAND (exp, 2)
3572 && op3 == TREE_OPERAND (exp, 3))
3573 return exp;
3574
3575 new_tree
3576 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3577 break;
3578
3579 default:
3580 gcc_unreachable ();
3581 }
3582 break;
3583
3584 case tcc_vl_exp:
3585 {
3586 int i;
3587
3588 new_tree = NULL_TREE;
3589
3590 /* If we are trying to replace F with a constant, inline back
3591 functions which do nothing other than compute a value from
3592 the arguments they are passed. This makes it possible to
3593 partially or entirely fold the replacement expression. */
3594 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3595 {
3596 tree t = maybe_inline_call_in_expr (exp);
3597 if (t)
3598 return SUBSTITUTE_IN_EXPR (t, f, r);
3599 }
3600
3601 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3602 {
3603 tree op = TREE_OPERAND (exp, i);
3604 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3605 if (new_op != op)
3606 {
3607 if (!new_tree)
3608 new_tree = copy_node (exp);
3609 TREE_OPERAND (new_tree, i) = new_op;
3610 }
3611 }
3612
3613 if (new_tree)
3614 {
3615 new_tree = fold (new_tree);
3616 if (TREE_CODE (new_tree) == CALL_EXPR)
3617 process_call_operands (new_tree);
3618 }
3619 else
3620 return exp;
3621 }
3622 break;
3623
3624 default:
3625 gcc_unreachable ();
3626 }
3627
3628 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3629
3630 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3631 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3632
3633 return new_tree;
3634 }
3635
3636 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3637 for it within OBJ, a tree that is an object or a chain of references. */
3638
3639 tree
3640 substitute_placeholder_in_expr (tree exp, tree obj)
3641 {
3642 enum tree_code code = TREE_CODE (exp);
3643 tree op0, op1, op2, op3;
3644 tree new_tree;
3645
3646 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3647 in the chain of OBJ. */
3648 if (code == PLACEHOLDER_EXPR)
3649 {
3650 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3651 tree elt;
3652
3653 for (elt = obj; elt != 0;
3654 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3655 || TREE_CODE (elt) == COND_EXPR)
3656 ? TREE_OPERAND (elt, 1)
3657 : (REFERENCE_CLASS_P (elt)
3658 || UNARY_CLASS_P (elt)
3659 || BINARY_CLASS_P (elt)
3660 || VL_EXP_CLASS_P (elt)
3661 || EXPRESSION_CLASS_P (elt))
3662 ? TREE_OPERAND (elt, 0) : 0))
3663 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3664 return elt;
3665
3666 for (elt = obj; elt != 0;
3667 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3668 || TREE_CODE (elt) == COND_EXPR)
3669 ? TREE_OPERAND (elt, 1)
3670 : (REFERENCE_CLASS_P (elt)
3671 || UNARY_CLASS_P (elt)
3672 || BINARY_CLASS_P (elt)
3673 || VL_EXP_CLASS_P (elt)
3674 || EXPRESSION_CLASS_P (elt))
3675 ? TREE_OPERAND (elt, 0) : 0))
3676 if (POINTER_TYPE_P (TREE_TYPE (elt))
3677 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3678 == need_type))
3679 return fold_build1 (INDIRECT_REF, need_type, elt);
3680
3681 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3682 survives until RTL generation, there will be an error. */
3683 return exp;
3684 }
3685
3686 /* TREE_LIST is special because we need to look at TREE_VALUE
3687 and TREE_CHAIN, not TREE_OPERANDS. */
3688 else if (code == TREE_LIST)
3689 {
3690 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3691 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3692 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3693 return exp;
3694
3695 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3696 }
3697 else
3698 switch (TREE_CODE_CLASS (code))
3699 {
3700 case tcc_constant:
3701 case tcc_declaration:
3702 return exp;
3703
3704 case tcc_exceptional:
3705 case tcc_unary:
3706 case tcc_binary:
3707 case tcc_comparison:
3708 case tcc_expression:
3709 case tcc_reference:
3710 case tcc_statement:
3711 switch (TREE_CODE_LENGTH (code))
3712 {
3713 case 0:
3714 return exp;
3715
3716 case 1:
3717 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3718 if (op0 == TREE_OPERAND (exp, 0))
3719 return exp;
3720
3721 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3722 break;
3723
3724 case 2:
3725 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3726 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3727
3728 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3729 return exp;
3730
3731 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3732 break;
3733
3734 case 3:
3735 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3736 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3737 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3738
3739 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3740 && op2 == TREE_OPERAND (exp, 2))
3741 return exp;
3742
3743 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3744 break;
3745
3746 case 4:
3747 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3748 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3749 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3750 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3751
3752 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3753 && op2 == TREE_OPERAND (exp, 2)
3754 && op3 == TREE_OPERAND (exp, 3))
3755 return exp;
3756
3757 new_tree
3758 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3759 break;
3760
3761 default:
3762 gcc_unreachable ();
3763 }
3764 break;
3765
3766 case tcc_vl_exp:
3767 {
3768 int i;
3769
3770 new_tree = NULL_TREE;
3771
3772 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3773 {
3774 tree op = TREE_OPERAND (exp, i);
3775 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3776 if (new_op != op)
3777 {
3778 if (!new_tree)
3779 new_tree = copy_node (exp);
3780 TREE_OPERAND (new_tree, i) = new_op;
3781 }
3782 }
3783
3784 if (new_tree)
3785 {
3786 new_tree = fold (new_tree);
3787 if (TREE_CODE (new_tree) == CALL_EXPR)
3788 process_call_operands (new_tree);
3789 }
3790 else
3791 return exp;
3792 }
3793 break;
3794
3795 default:
3796 gcc_unreachable ();
3797 }
3798
3799 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3800
3801 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3802 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3803
3804 return new_tree;
3805 }
3806 \f
3807
3808 /* Subroutine of stabilize_reference; this is called for subtrees of
3809 references. Any expression with side-effects must be put in a SAVE_EXPR
3810 to ensure that it is only evaluated once.
3811
3812 We don't put SAVE_EXPR nodes around everything, because assigning very
3813 simple expressions to temporaries causes us to miss good opportunities
3814 for optimizations. Among other things, the opportunity to fold in the
3815 addition of a constant into an addressing mode often gets lost, e.g.
3816 "y[i+1] += x;". In general, we take the approach that we should not make
3817 an assignment unless we are forced into it - i.e., that any non-side effect
3818 operator should be allowed, and that cse should take care of coalescing
3819 multiple utterances of the same expression should that prove fruitful. */
3820
3821 static tree
3822 stabilize_reference_1 (tree e)
3823 {
3824 tree result;
3825 enum tree_code code = TREE_CODE (e);
3826
3827 /* We cannot ignore const expressions because one might be a reference
3828 to a const array whose index contains side-effects. But we can
3829 ignore things that are actually constant or that have already been
3830 handled by this function. */
3831
3832 if (tree_invariant_p (e))
3833 return e;
3834
3835 switch (TREE_CODE_CLASS (code))
3836 {
3837 case tcc_exceptional:
3838 case tcc_type:
3839 case tcc_declaration:
3840 case tcc_comparison:
3841 case tcc_statement:
3842 case tcc_expression:
3843 case tcc_reference:
3844 case tcc_vl_exp:
3845 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3846 so that it will only be evaluated once. */
3847 /* The reference (r) and comparison (<) classes could be handled as
3848 below, but it is generally faster to only evaluate them once. */
3849 if (TREE_SIDE_EFFECTS (e))
3850 return save_expr (e);
3851 return e;
3852
3853 case tcc_constant:
3854 /* Constants need no processing. In fact, we should never reach
3855 here. */
3856 return e;
3857
3858 case tcc_binary:
3859 /* Division is slow and tends to be compiled with jumps,
3860 especially the division by powers of 2 that is often
3861 found inside of an array reference. So do it just once. */
3862 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3863 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3864 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3865 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3866 return save_expr (e);
3867 /* Recursively stabilize each operand. */
3868 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3869 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3870 break;
3871
3872 case tcc_unary:
3873 /* Recursively stabilize each operand. */
3874 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3875 break;
3876
3877 default:
3878 gcc_unreachable ();
3879 }
3880
3881 TREE_TYPE (result) = TREE_TYPE (e);
3882 TREE_READONLY (result) = TREE_READONLY (e);
3883 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3884 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3885
3886 return result;
3887 }
3888
3889 /* Stabilize a reference so that we can use it any number of times
3890 without causing its operands to be evaluated more than once.
3891 Returns the stabilized reference. This works by means of save_expr,
3892 so see the caveats in the comments about save_expr.
3893
3894 Also allows conversion expressions whose operands are references.
3895 Any other kind of expression is returned unchanged. */
3896
3897 tree
3898 stabilize_reference (tree ref)
3899 {
3900 tree result;
3901 enum tree_code code = TREE_CODE (ref);
3902
3903 switch (code)
3904 {
3905 case VAR_DECL:
3906 case PARM_DECL:
3907 case RESULT_DECL:
3908 /* No action is needed in this case. */
3909 return ref;
3910
3911 CASE_CONVERT:
3912 case FLOAT_EXPR:
3913 case FIX_TRUNC_EXPR:
3914 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
3915 break;
3916
3917 case INDIRECT_REF:
3918 result = build_nt (INDIRECT_REF,
3919 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
3920 break;
3921
3922 case COMPONENT_REF:
3923 result = build_nt (COMPONENT_REF,
3924 stabilize_reference (TREE_OPERAND (ref, 0)),
3925 TREE_OPERAND (ref, 1), NULL_TREE);
3926 break;
3927
3928 case BIT_FIELD_REF:
3929 result = build_nt (BIT_FIELD_REF,
3930 stabilize_reference (TREE_OPERAND (ref, 0)),
3931 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3932 break;
3933
3934 case ARRAY_REF:
3935 result = build_nt (ARRAY_REF,
3936 stabilize_reference (TREE_OPERAND (ref, 0)),
3937 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3938 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3939 break;
3940
3941 case ARRAY_RANGE_REF:
3942 result = build_nt (ARRAY_RANGE_REF,
3943 stabilize_reference (TREE_OPERAND (ref, 0)),
3944 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3945 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3946 break;
3947
3948 case COMPOUND_EXPR:
3949 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3950 it wouldn't be ignored. This matters when dealing with
3951 volatiles. */
3952 return stabilize_reference_1 (ref);
3953
3954 /* If arg isn't a kind of lvalue we recognize, make no change.
3955 Caller should recognize the error for an invalid lvalue. */
3956 default:
3957 return ref;
3958
3959 case ERROR_MARK:
3960 return error_mark_node;
3961 }
3962
3963 TREE_TYPE (result) = TREE_TYPE (ref);
3964 TREE_READONLY (result) = TREE_READONLY (ref);
3965 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
3966 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3967
3968 return result;
3969 }
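
/* Example (illustrative): stabilizing the reference a.b[i / 4] rebuilds
   the COMPONENT_REF and ARRAY_REF skeleton but routes the index through
   stabilize_reference_1, so the division ends up inside a SAVE_EXPR and
   is computed only once even if the caller expands the reference twice,
   say for a read-modify-write of a bit-field.  */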
3970 \f
3971 /* Low-level constructors for expressions. */
3972
3973 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
3974 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
3975
3976 void
3977 recompute_tree_invariant_for_addr_expr (tree t)
3978 {
3979 tree node;
3980 bool tc = true, se = false;
3981
3982 /* We started out assuming this address is both invariant and constant and
3983 that it has no side effects. Now go down any handled components and see if
3984 any of them involve offsets that are either non-constant or non-invariant.
3985 Also check for side-effects.
3986
3987 ??? Note that this code makes no attempt to deal with the case where
3988 taking the address of something causes a copy due to misalignment. */
3989
3990 #define UPDATE_FLAGS(NODE) \
3991 do { tree _node = (NODE); \
3992 if (_node && !TREE_CONSTANT (_node)) tc = false; \
3993 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
3994
3995 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
3996 node = TREE_OPERAND (node, 0))
3997 {
3998 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
3999 array reference (probably made temporarily by the G++ front end),
4000 so ignore all the operands. */
4001 if ((TREE_CODE (node) == ARRAY_REF
4002 || TREE_CODE (node) == ARRAY_RANGE_REF)
4003 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4004 {
4005 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4006 if (TREE_OPERAND (node, 2))
4007 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4008 if (TREE_OPERAND (node, 3))
4009 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4010 }
4011 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4012 FIELD_DECL, apparently. The G++ front end can put something else
4013 there, at least temporarily. */
4014 else if (TREE_CODE (node) == COMPONENT_REF
4015 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4016 {
4017 if (TREE_OPERAND (node, 2))
4018 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4019 }
4020 }
4021
4022 node = lang_hooks.expr_to_decl (node, &tc, &se);
4023
4024 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4025 the address, since &(*a)->b is a form of addition. If it's a constant, the
4026 address is constant too. If it's a decl, its address is constant if the
4027 decl is static. Everything else is not constant and, furthermore,
4028 taking the address of a volatile variable is not volatile. */
4029 if (TREE_CODE (node) == INDIRECT_REF
4030 || TREE_CODE (node) == MEM_REF)
4031 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4032 else if (CONSTANT_CLASS_P (node))
4033 ;
4034 else if (DECL_P (node))
4035 tc &= (staticp (node) != NULL_TREE);
4036 else
4037 {
4038 tc = false;
4039 se |= TREE_SIDE_EFFECTS (node);
4040 }
4041
4042
4043 TREE_CONSTANT (t) = tc;
4044 TREE_SIDE_EFFECTS (t) = se;
4045 #undef UPDATE_FLAGS
4046 }
4047
4048 /* Build an expression of code CODE, data type TYPE, and operands as
4049 specified. Expressions and reference nodes can be created this way.
4050 Constants, decls, types and misc nodes cannot be.
4051
4052 We define six non-variadic functions, from 0 to 5 arguments. This is
4053 enough for all extant tree codes. */
4054
4055 tree
4056 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4057 {
4058 tree t;
4059
4060 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4061
4062 t = make_node_stat (code PASS_MEM_STAT);
4063 TREE_TYPE (t) = tt;
4064
4065 return t;
4066 }
4067
4068 tree
4069 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4070 {
4071 int length = sizeof (struct tree_exp);
4072 tree t;
4073
4074 record_node_allocation_statistics (code, length);
4075
4076 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4077
4078 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4079
4080 memset (t, 0, sizeof (struct tree_common));
4081
4082 TREE_SET_CODE (t, code);
4083
4084 TREE_TYPE (t) = type;
4085 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4086 TREE_OPERAND (t, 0) = node;
4087 if (node && !TYPE_P (node))
4088 {
4089 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4090 TREE_READONLY (t) = TREE_READONLY (node);
4091 }
4092
4093 if (TREE_CODE_CLASS (code) == tcc_statement)
4094 TREE_SIDE_EFFECTS (t) = 1;
4095 else switch (code)
4096 {
4097 case VA_ARG_EXPR:
4098 /* All of these have side-effects, no matter what their
4099 operands are. */
4100 TREE_SIDE_EFFECTS (t) = 1;
4101 TREE_READONLY (t) = 0;
4102 break;
4103
4104 case INDIRECT_REF:
4105 /* Whether a dereference is readonly has nothing to do with whether
4106 its operand is readonly. */
4107 TREE_READONLY (t) = 0;
4108 break;
4109
4110 case ADDR_EXPR:
4111 if (node)
4112 recompute_tree_invariant_for_addr_expr (t);
4113 break;
4114
4115 default:
4116 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4117 && node && !TYPE_P (node)
4118 && TREE_CONSTANT (node))
4119 TREE_CONSTANT (t) = 1;
4120 if (TREE_CODE_CLASS (code) == tcc_reference
4121 && node && TREE_THIS_VOLATILE (node))
4122 TREE_THIS_VOLATILE (t) = 1;
4123 break;
4124 }
4125
4126 return t;
4127 }
4128
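/* For illustration only: a hypothetical caller could build the unary
   negation -A of an existing tree A with

       tree neg = build1 (NEGATE_EXPR, TREE_TYPE (a), a);

   Since NEGATE_EXPR is of class tcc_unary, the default case above marks
   NEG constant whenever A is constant.  */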
4129 #define PROCESS_ARG(N) \
4130 do { \
4131 TREE_OPERAND (t, N) = arg##N; \
4132 if (arg##N &&!TYPE_P (arg##N)) \
4133 { \
4134 if (TREE_SIDE_EFFECTS (arg##N)) \
4135 side_effects = 1; \
4136 if (!TREE_READONLY (arg##N) \
4137 && !CONSTANT_CLASS_P (arg##N)) \
4138 (void) (read_only = 0); \
4139 if (!TREE_CONSTANT (arg##N)) \
4140 (void) (constant = 0); \
4141 } \
4142 } while (0)
4143
4144 tree
4145 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4146 {
4147 bool constant, read_only, side_effects;
4148 tree t;
4149
4150 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4151
4152 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4153 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4154 /* When sizetype precision doesn't match that of pointers
4155 we need to be able to build explicit extensions or truncations
4156 of the offset argument. */
4157 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4158 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4159 && TREE_CODE (arg1) == INTEGER_CST);
4160
4161 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4162 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4163 && ptrofftype_p (TREE_TYPE (arg1)));
4164
4165 t = make_node_stat (code PASS_MEM_STAT);
4166 TREE_TYPE (t) = tt;
4167
4168 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4169 result based on those same flags for the arguments. But if the
4170 arguments aren't really even `tree' expressions, we shouldn't be trying
4171 to do this. */
4172
4173 /* Expressions without side effects may be constant if their
4174 arguments are as well. */
4175 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4176 || TREE_CODE_CLASS (code) == tcc_binary);
4177 read_only = 1;
4178 side_effects = TREE_SIDE_EFFECTS (t);
4179
4180 PROCESS_ARG (0);
4181 PROCESS_ARG (1);
4182
4183 TREE_READONLY (t) = read_only;
4184 TREE_CONSTANT (t) = constant;
4185 TREE_SIDE_EFFECTS (t) = side_effects;
4186 TREE_THIS_VOLATILE (t)
4187 = (TREE_CODE_CLASS (code) == tcc_reference
4188 && arg0 && TREE_THIS_VOLATILE (arg0));
4189
4190 return t;
4191 }
4192
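/* For illustration only: a hypothetical caller could build the sum A + B
   of two integer trees A and B with

       tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);

   PROCESS_ARG derives TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS
   of SUM from the two operands.  */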
4193
4194 tree
4195 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4196 tree arg2 MEM_STAT_DECL)
4197 {
4198 bool constant, read_only, side_effects;
4199 tree t;
4200
4201 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4202 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4203
4204 t = make_node_stat (code PASS_MEM_STAT);
4205 TREE_TYPE (t) = tt;
4206
4207 read_only = 1;
4208
4209 /* As a special exception, if COND_EXPR has NULL branches, we
4210 assume that it is a gimple statement and always consider
4211 it to have side effects. */
4212 if (code == COND_EXPR
4213 && tt == void_type_node
4214 && arg1 == NULL_TREE
4215 && arg2 == NULL_TREE)
4216 side_effects = true;
4217 else
4218 side_effects = TREE_SIDE_EFFECTS (t);
4219
4220 PROCESS_ARG (0);
4221 PROCESS_ARG (1);
4222 PROCESS_ARG (2);
4223
4224 if (code == COND_EXPR)
4225 TREE_READONLY (t) = read_only;
4226
4227 TREE_SIDE_EFFECTS (t) = side_effects;
4228 TREE_THIS_VOLATILE (t)
4229 = (TREE_CODE_CLASS (code) == tcc_reference
4230 && arg0 && TREE_THIS_VOLATILE (arg0));
4231
4232 return t;
4233 }
4234
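/* For illustration only: a void COND_EXPR with empty branches, as in the
   hypothetical

       tree stmt = build3 (COND_EXPR, void_type_node, cond,
                           NULL_TREE, NULL_TREE);

   is treated as a GIMPLE-style statement and always gets TREE_SIDE_EFFECTS
   set by the special case above.  */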
4235 tree
4236 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4237 tree arg2, tree arg3 MEM_STAT_DECL)
4238 {
4239 bool constant, read_only, side_effects;
4240 tree t;
4241
4242 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4243
4244 t = make_node_stat (code PASS_MEM_STAT);
4245 TREE_TYPE (t) = tt;
4246
4247 side_effects = TREE_SIDE_EFFECTS (t);
4248
4249 PROCESS_ARG (0);
4250 PROCESS_ARG (1);
4251 PROCESS_ARG (2);
4252 PROCESS_ARG (3);
4253
4254 TREE_SIDE_EFFECTS (t) = side_effects;
4255 TREE_THIS_VOLATILE (t)
4256 = (TREE_CODE_CLASS (code) == tcc_reference
4257 && arg0 && TREE_THIS_VOLATILE (arg0));
4258
4259 return t;
4260 }
4261
4262 tree
4263 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4264 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4265 {
4266 bool constant, read_only, side_effects;
4267 tree t;
4268
4269 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4270
4271 t = make_node_stat (code PASS_MEM_STAT);
4272 TREE_TYPE (t) = tt;
4273
4274 side_effects = TREE_SIDE_EFFECTS (t);
4275
4276 PROCESS_ARG (0);
4277 PROCESS_ARG (1);
4278 PROCESS_ARG (2);
4279 PROCESS_ARG (3);
4280 PROCESS_ARG (4);
4281
4282 TREE_SIDE_EFFECTS (t) = side_effects;
4283 TREE_THIS_VOLATILE (t)
4284 = (TREE_CODE_CLASS (code) == tcc_reference
4285 && arg0 && TREE_THIS_VOLATILE (arg0));
4286
4287 return t;
4288 }
4289
4290 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4291 on the pointer PTR. */
4292
4293 tree
4294 build_simple_mem_ref_loc (location_t loc, tree ptr)
4295 {
4296 HOST_WIDE_INT offset = 0;
4297 tree ptype = TREE_TYPE (ptr);
4298 tree tem;
4299 /* For convenience allow addresses that collapse to a simple base
4300 and offset. */
4301 if (TREE_CODE (ptr) == ADDR_EXPR
4302 && (handled_component_p (TREE_OPERAND (ptr, 0))
4303 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4304 {
4305 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4306 gcc_assert (ptr);
4307 ptr = build_fold_addr_expr (ptr);
4308 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4309 }
4310 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4311 ptr, build_int_cst (ptype, offset));
4312 SET_EXPR_LOCATION (tem, loc);
4313 return tem;
4314 }
4315
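/* For illustration only: given a hypothetical pointer tree PTR of type
   int * and some location LOC, a plain dereference *PTR can be built as

       tree deref = build_simple_mem_ref_loc (loc, ptr);

   which produces a MEM_REF of type int whose second operand is a zero
   offset of the pointer type.  */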
4316 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4317
4318 offset_int
4319 mem_ref_offset (const_tree t)
4320 {
4321 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4322 }
4323
4324 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4325 offsetted by OFFSET units. */
4326
4327 tree
4328 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4329 {
4330 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4331 build_fold_addr_expr (base),
4332 build_int_cst (ptr_type_node, offset));
4333 tree addr = build1 (ADDR_EXPR, type, ref);
4334 recompute_tree_invariant_for_addr_expr (addr);
4335 return addr;
4336 }
4337
4338 /* Similar except don't specify the TREE_TYPE
4339 and leave the TREE_SIDE_EFFECTS as 0.
4340 It is permissible for arguments to be null,
4341 or even garbage if their values do not matter. */
4342
4343 tree
4344 build_nt (enum tree_code code, ...)
4345 {
4346 tree t;
4347 int length;
4348 int i;
4349 va_list p;
4350
4351 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4352
4353 va_start (p, code);
4354
4355 t = make_node (code);
4356 length = TREE_CODE_LENGTH (code);
4357
4358 for (i = 0; i < length; i++)
4359 TREE_OPERAND (t, i) = va_arg (p, tree);
4360
4361 va_end (p);
4362 return t;
4363 }
4364
4365 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4366 tree vec. */
4367
4368 tree
4369 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4370 {
4371 tree ret, t;
4372 unsigned int ix;
4373
4374 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4375 CALL_EXPR_FN (ret) = fn;
4376 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4377 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4378 CALL_EXPR_ARG (ret, ix) = t;
4379 return ret;
4380 }
4381 \f
4382 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4383 We do NOT enter this node in any sort of symbol table.
4384
4385 LOC is the location of the decl.
4386
4387 layout_decl is used to set up the decl's storage layout.
4388 Other slots are initialized to 0 or null pointers. */
4389
4390 tree
4391 build_decl_stat (location_t loc, enum tree_code code, tree name,
4392 tree type MEM_STAT_DECL)
4393 {
4394 tree t;
4395
4396 t = make_node_stat (code PASS_MEM_STAT);
4397 DECL_SOURCE_LOCATION (t) = loc;
4398
4399 /* if (type == error_mark_node)
4400 type = integer_type_node; */
4401 /* That is not done, deliberately, so that having error_mark_node
4402 as the type can suppress useless errors in the use of this variable. */
4403
4404 DECL_NAME (t) = name;
4405 TREE_TYPE (t) = type;
4406
4407 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4408 layout_decl (t, 0);
4409
4410 return t;
4411 }
4412
4413 /* Builds and returns function declaration with NAME and TYPE. */
4414
4415 tree
4416 build_fn_decl (const char *name, tree type)
4417 {
4418 tree id = get_identifier (name);
4419 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4420
4421 DECL_EXTERNAL (decl) = 1;
4422 TREE_PUBLIC (decl) = 1;
4423 DECL_ARTIFICIAL (decl) = 1;
4424 TREE_NOTHROW (decl) = 1;
4425
4426 return decl;
4427 }
4428
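/* For illustration only: a hypothetical external declaration of
   "int foo (void)" could be created with

       tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
       tree fndecl = build_fn_decl ("foo", fntype);

   The resulting decl is marked external, public, artificial and nothrow
   by build_fn_decl above.  */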
4429 vec<tree, va_gc> *all_translation_units;
4430
4431 /* Builds a new translation-unit decl with name NAME, queues it in the
4432 global list of translation-unit decls and returns it. */
4433
4434 tree
4435 build_translation_unit_decl (tree name)
4436 {
4437 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4438 name, NULL_TREE);
4439 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4440 vec_safe_push (all_translation_units, tu);
4441 return tu;
4442 }
4443
4444 \f
4445 /* BLOCK nodes are used to represent the structure of binding contours
4446 and declarations, once those contours have been exited and their contents
4447 compiled. This information is used for outputting debugging info. */
4448
4449 tree
4450 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4451 {
4452 tree block = make_node (BLOCK);
4453
4454 BLOCK_VARS (block) = vars;
4455 BLOCK_SUBBLOCKS (block) = subblocks;
4456 BLOCK_SUPERCONTEXT (block) = supercontext;
4457 BLOCK_CHAIN (block) = chain;
4458 return block;
4459 }
4460
4461 \f
4462 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4463
4464 LOC is the location to use in tree T. */
4465
4466 void
4467 protected_set_expr_location (tree t, location_t loc)
4468 {
4469 if (t && CAN_HAVE_LOCATION_P (t))
4470 SET_EXPR_LOCATION (t, loc);
4471 }
4472 \f
4473 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4474 is ATTRIBUTE. */
4475
4476 tree
4477 build_decl_attribute_variant (tree ddecl, tree attribute)
4478 {
4479 DECL_ATTRIBUTES (ddecl) = attribute;
4480 return ddecl;
4481 }
4482
4483 /* Borrowed from hashtab.c iterative_hash implementation. */
4484 #define mix(a,b,c) \
4485 { \
4486 a -= b; a -= c; a ^= (c>>13); \
4487 b -= c; b -= a; b ^= (a<< 8); \
4488 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4489 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4490 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4491 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4492 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4493 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4494 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4495 }
4496
4497
4498 /* Produce good hash value combining VAL and VAL2. */
4499 hashval_t
4500 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4501 {
4502 /* the golden ratio; an arbitrary value. */
4503 hashval_t a = 0x9e3779b9;
4504
4505 mix (a, val, val2);
4506 return val2;
4507 }
4508
4509 /* Produce good hash value combining VAL and VAL2. */
4510 hashval_t
4511 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4512 {
4513 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4514 return iterative_hash_hashval_t (val, val2);
4515 else
4516 {
4517 hashval_t a = (hashval_t) val;
4518 /* Avoid warnings about shifting of more than the width of the type on
4519 hosts that won't execute this path. */
4520 int zero = 0;
4521 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4522 mix (a, b, val2);
4523 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4524 {
4525 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4526 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4527 mix (a, b, val2);
4528 }
4529 return val2;
4530 }
4531 }
4532
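/* For illustration only: hash values are typically folded in left to
   right, as in the hypothetical

       hashval_t h = 0;
       h = iterative_hash_hashval_t (TYPE_HASH (type), h);
       h = iterative_hash_host_wide_int (offset, h);

   where TYPE is a tree and OFFSET a HOST_WIDE_INT; each call mixes the
   new value into the running hash H and returns the updated hash.  */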
4533 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4534 is ATTRIBUTE and its qualifiers are QUALS.
4535
4536 Record such modified types already made so we don't make duplicates. */
4537
4538 tree
4539 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4540 {
4541 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4542 {
4543 hashval_t hashcode = 0;
4544 tree ntype;
4545 int i;
4546 tree t;
4547 enum tree_code code = TREE_CODE (ttype);
4548
4549 /* Building a distinct copy of a tagged type is inappropriate; it
4550 causes breakage in code that expects there to be a one-to-one
4551 relationship between a struct and its fields.
4552 build_duplicate_type is another solution (as used in
4553 handle_transparent_union_attribute), but that doesn't play well
4554 with the stronger C++ type identity model. */
4555 if (TREE_CODE (ttype) == RECORD_TYPE
4556 || TREE_CODE (ttype) == UNION_TYPE
4557 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4558 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4559 {
4560 warning (OPT_Wattributes,
4561 "ignoring attributes applied to %qT after definition",
4562 TYPE_MAIN_VARIANT (ttype));
4563 return build_qualified_type (ttype, quals);
4564 }
4565
4566 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4567 ntype = build_distinct_type_copy (ttype);
4568
4569 TYPE_ATTRIBUTES (ntype) = attribute;
4570
4571 hashcode = iterative_hash_object (code, hashcode);
4572 if (TREE_TYPE (ntype))
4573 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4574 hashcode);
4575 hashcode = attribute_hash_list (attribute, hashcode);
4576
4577 switch (TREE_CODE (ntype))
4578 {
4579 case FUNCTION_TYPE:
4580 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4581 break;
4582 case ARRAY_TYPE:
4583 if (TYPE_DOMAIN (ntype))
4584 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4585 hashcode);
4586 break;
4587 case INTEGER_TYPE:
4588 t = TYPE_MAX_VALUE (ntype);
4589 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4590 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4591 break;
4592 case REAL_TYPE:
4593 case FIXED_POINT_TYPE:
4594 {
4595 unsigned int precision = TYPE_PRECISION (ntype);
4596 hashcode = iterative_hash_object (precision, hashcode);
4597 }
4598 break;
4599 default:
4600 break;
4601 }
4602
4603 ntype = type_hash_canon (hashcode, ntype);
4604
4605 /* If the target-dependent attributes make NTYPE different from
4606 its canonical type, we will need to use structural equality
4607 checks for this type. */
4608 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4609 || !comp_type_attributes (ntype, ttype))
4610 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4611 else if (TYPE_CANONICAL (ntype) == ntype)
4612 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4613
4614 ttype = build_qualified_type (ntype, quals);
4615 }
4616 else if (TYPE_QUALS (ttype) != quals)
4617 ttype = build_qualified_type (ttype, quals);
4618
4619 return ttype;
4620 }
4621
4622 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4623 the same. */
4624
4625 static bool
4626 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4627 {
4628 tree cl1, cl2;
4629 for (cl1 = clauses1, cl2 = clauses2;
4630 cl1 && cl2;
4631 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4632 {
4633 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4634 return false;
4635 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4636 {
4637 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4638 OMP_CLAUSE_DECL (cl2)) != 1)
4639 return false;
4640 }
4641 switch (OMP_CLAUSE_CODE (cl1))
4642 {
4643 case OMP_CLAUSE_ALIGNED:
4644 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4645 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4646 return false;
4647 break;
4648 case OMP_CLAUSE_LINEAR:
4649 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4650 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4651 return false;
4652 break;
4653 case OMP_CLAUSE_SIMDLEN:
4654 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4655 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4656 return false;
4657 default:
4658 break;
4659 }
4660 }
4661 return true;
4662 }
4663
4664 /* Compare two constructor-element-type constants. Return 1 if the lists
4665 are known to be equal; otherwise return 0. */
4666
4667 static bool
4668 simple_cst_list_equal (const_tree l1, const_tree l2)
4669 {
4670 while (l1 != NULL_TREE && l2 != NULL_TREE)
4671 {
4672 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4673 return false;
4674
4675 l1 = TREE_CHAIN (l1);
4676 l2 = TREE_CHAIN (l2);
4677 }
4678
4679 return l1 == l2;
4680 }
4681
4682 /* Compare two attributes for their value identity. Return true if the
4683 attribute values are known to be equal; otherwise return false.
4684 */
4685
4686 static bool
4687 attribute_value_equal (const_tree attr1, const_tree attr2)
4688 {
4689 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4690 return true;
4691
4692 if (TREE_VALUE (attr1) != NULL_TREE
4693 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4694 && TREE_VALUE (attr2) != NULL
4695 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4696 return (simple_cst_list_equal (TREE_VALUE (attr1),
4697 TREE_VALUE (attr2)) == 1);
4698
4699 if ((flag_openmp || flag_openmp_simd)
4700 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4701 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4702 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4703 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4704 TREE_VALUE (attr2));
4705
4706 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4707 }
4708
4709 /* Return 0 if the attributes for two types are incompatible, 1 if they
4710 are compatible, and 2 if they are nearly compatible (which causes a
4711 warning to be generated). */
4712 int
4713 comp_type_attributes (const_tree type1, const_tree type2)
4714 {
4715 const_tree a1 = TYPE_ATTRIBUTES (type1);
4716 const_tree a2 = TYPE_ATTRIBUTES (type2);
4717 const_tree a;
4718
4719 if (a1 == a2)
4720 return 1;
4721 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4722 {
4723 const struct attribute_spec *as;
4724 const_tree attr;
4725
4726 as = lookup_attribute_spec (get_attribute_name (a));
4727 if (!as || as->affects_type_identity == false)
4728 continue;
4729
4730 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4731 if (!attr || !attribute_value_equal (a, attr))
4732 break;
4733 }
4734 if (!a)
4735 {
4736 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4737 {
4738 const struct attribute_spec *as;
4739
4740 as = lookup_attribute_spec (get_attribute_name (a));
4741 if (!as || as->affects_type_identity == false)
4742 continue;
4743
4744 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4745 break;
4746 /* We don't need to compare trees again, as we did this
4747 already in the first loop. */
4748 }
4749 /* All attributes affecting type identity are equal, so
4750 there is no need to call the target hook for comparison. */
4751 if (!a)
4752 return 1;
4753 }
4754 /* As some type combinations - like the default calling convention - might
4755 be compatible, we have to call the target hook to get the final result. */
4756 return targetm.comp_type_attributes (type1, type2);
4757 }
4758
4759 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4760 is ATTRIBUTE.
4761
4762 Record such modified types already made so we don't make duplicates. */
4763
4764 tree
4765 build_type_attribute_variant (tree ttype, tree attribute)
4766 {
4767 return build_type_attribute_qual_variant (ttype, attribute,
4768 TYPE_QUALS (ttype));
4769 }
4770
4771
4772 /* Reset the expression *EXPR_P, a size or position.
4773
4774 ??? We could reset all non-constant sizes or positions. But it's cheap
4775 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4776
4777 We need to reset self-referential sizes or positions because they cannot
4778 be gimplified and thus can contain a CALL_EXPR after the gimplification
4779 is finished, which will run afoul of LTO streaming. And they need to be
4780 reset to something essentially dummy but not constant, so as to preserve
4781 the properties of the object they are attached to. */
4782
4783 static inline void
4784 free_lang_data_in_one_sizepos (tree *expr_p)
4785 {
4786 tree expr = *expr_p;
4787 if (CONTAINS_PLACEHOLDER_P (expr))
4788 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4789 }
4790
4791
4792 /* Reset all the fields in a binfo node BINFO. We only keep
4793 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4794
4795 static void
4796 free_lang_data_in_binfo (tree binfo)
4797 {
4798 unsigned i;
4799 tree t;
4800
4801 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4802
4803 BINFO_VIRTUALS (binfo) = NULL_TREE;
4804 BINFO_BASE_ACCESSES (binfo) = NULL;
4805 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4806 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4807
4808 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4809 free_lang_data_in_binfo (t);
4810 }
4811
4812
4813 /* Reset all language specific information still present in TYPE. */
4814
4815 static void
4816 free_lang_data_in_type (tree type)
4817 {
4818 gcc_assert (TYPE_P (type));
4819
4820 /* Give the FE a chance to remove its own data first. */
4821 lang_hooks.free_lang_data (type);
4822
4823 TREE_LANG_FLAG_0 (type) = 0;
4824 TREE_LANG_FLAG_1 (type) = 0;
4825 TREE_LANG_FLAG_2 (type) = 0;
4826 TREE_LANG_FLAG_3 (type) = 0;
4827 TREE_LANG_FLAG_4 (type) = 0;
4828 TREE_LANG_FLAG_5 (type) = 0;
4829 TREE_LANG_FLAG_6 (type) = 0;
4830
4831 if (TREE_CODE (type) == FUNCTION_TYPE)
4832 {
4833 /* Remove the const and volatile qualifiers from arguments. The
4834 C++ front end removes them, but the C front end does not,
4835 leading to false ODR violation errors when merging two
4836 instances of the same function signature compiled by
4837 different front ends. */
4838 tree p;
4839
4840 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4841 {
4842 tree arg_type = TREE_VALUE (p);
4843
4844 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4845 {
4846 int quals = TYPE_QUALS (arg_type)
4847 & ~TYPE_QUAL_CONST
4848 & ~TYPE_QUAL_VOLATILE;
4849 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4850 free_lang_data_in_type (TREE_VALUE (p));
4851 }
4852 }
4853 }
4854
4855 /* Remove members that are not actually FIELD_DECLs from the field
4856 list of an aggregate. These occur in C++. */
4857 if (RECORD_OR_UNION_TYPE_P (type))
4858 {
4859 tree prev, member;
4860
4861 /* Note that TYPE_FIELDS can be shared across distinct
4862 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4863 to be removed, we cannot set its TREE_CHAIN to NULL.
4864 Otherwise, we would not be able to find all the other fields
4865 in the other instances of this TREE_TYPE.
4866
4867 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4868 prev = NULL_TREE;
4869 member = TYPE_FIELDS (type);
4870 while (member)
4871 {
4872 if (TREE_CODE (member) == FIELD_DECL
4873 || TREE_CODE (member) == TYPE_DECL)
4874 {
4875 if (prev)
4876 TREE_CHAIN (prev) = member;
4877 else
4878 TYPE_FIELDS (type) = member;
4879 prev = member;
4880 }
4881
4882 member = TREE_CHAIN (member);
4883 }
4884
4885 if (prev)
4886 TREE_CHAIN (prev) = NULL_TREE;
4887 else
4888 TYPE_FIELDS (type) = NULL_TREE;
4889
4890 TYPE_METHODS (type) = NULL_TREE;
4891 if (TYPE_BINFO (type))
4892 free_lang_data_in_binfo (TYPE_BINFO (type));
4893 }
4894 else
4895 {
4896 /* For non-aggregate types, clear out the language slot (which
4897 overloads TYPE_BINFO). */
4898 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4899
4900 if (INTEGRAL_TYPE_P (type)
4901 || SCALAR_FLOAT_TYPE_P (type)
4902 || FIXED_POINT_TYPE_P (type))
4903 {
4904 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4905 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4906 }
4907 }
4908
4909 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4910 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4911
4912 if (TYPE_CONTEXT (type)
4913 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4914 {
4915 tree ctx = TYPE_CONTEXT (type);
4916 do
4917 {
4918 ctx = BLOCK_SUPERCONTEXT (ctx);
4919 }
4920 while (ctx && TREE_CODE (ctx) == BLOCK);
4921 TYPE_CONTEXT (type) = ctx;
4922 }
4923 }
4924
4925
4926 /* Return true if DECL may need an assembler name to be set. */
4927
4928 static inline bool
4929 need_assembler_name_p (tree decl)
4930 {
4931 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4932 if (TREE_CODE (decl) != FUNCTION_DECL
4933 && TREE_CODE (decl) != VAR_DECL)
4934 return false;
4935
4936 /* If DECL already has its assembler name set, it does not need a
4937 new one. */
4938 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4939 || DECL_ASSEMBLER_NAME_SET_P (decl))
4940 return false;
4941
4942 /* Abstract decls do not need an assembler name. */
4943 if (DECL_ABSTRACT (decl))
4944 return false;
4945
4946 /* For VAR_DECLs, only static, public and external symbols need an
4947 assembler name. */
4948 if (TREE_CODE (decl) == VAR_DECL
4949 && !TREE_STATIC (decl)
4950 && !TREE_PUBLIC (decl)
4951 && !DECL_EXTERNAL (decl))
4952 return false;
4953
4954 if (TREE_CODE (decl) == FUNCTION_DECL)
4955 {
4956 /* Do not set assembler name on builtins. Allow RTL expansion to
4957 decide whether to expand inline or via a regular call. */
4958 if (DECL_BUILT_IN (decl)
4959 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
4960 return false;
4961
4962 /* Functions represented in the callgraph need an assembler name. */
4963 if (cgraph_get_node (decl) != NULL)
4964 return true;
4965
4966 /* Unused and not public functions don't need an assembler name. */
4967 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
4968 return false;
4969 }
4970
4971 return true;
4972 }
4973
4974
4975 /* Reset all language specific information still present in symbol
4976 DECL. */
4977
4978 static void
4979 free_lang_data_in_decl (tree decl)
4980 {
4981 gcc_assert (DECL_P (decl));
4982
4983 /* Give the FE a chance to remove its own data first. */
4984 lang_hooks.free_lang_data (decl);
4985
4986 TREE_LANG_FLAG_0 (decl) = 0;
4987 TREE_LANG_FLAG_1 (decl) = 0;
4988 TREE_LANG_FLAG_2 (decl) = 0;
4989 TREE_LANG_FLAG_3 (decl) = 0;
4990 TREE_LANG_FLAG_4 (decl) = 0;
4991 TREE_LANG_FLAG_5 (decl) = 0;
4992 TREE_LANG_FLAG_6 (decl) = 0;
4993
4994 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
4995 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
4996 if (TREE_CODE (decl) == FIELD_DECL)
4997 {
4998 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
4999 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5000 DECL_QUALIFIER (decl) = NULL_TREE;
5001 }
5002
5003 if (TREE_CODE (decl) == FUNCTION_DECL)
5004 {
5005 struct cgraph_node *node;
5006 if (!(node = cgraph_get_node (decl))
5007 || (!node->definition && !node->clones))
5008 {
5009 if (node)
5010 cgraph_release_function_body (node);
5011 else
5012 {
5013 release_function_body (decl);
5014 DECL_ARGUMENTS (decl) = NULL;
5015 DECL_RESULT (decl) = NULL;
5016 DECL_INITIAL (decl) = error_mark_node;
5017 }
5018 }
5019 if (gimple_has_body_p (decl))
5020 {
5021 tree t;
5022
5023 /* If DECL has a gimple body, then the context for its
5024 arguments must be DECL. Otherwise, it doesn't really
5025 matter, as we will not be emitting any code for DECL. In
5026 general, there may be other instances of DECL created by
5027 the front end and since PARM_DECLs are generally shared,
5028 their DECL_CONTEXT changes as the replicas of DECL are
5029 created. The only time where DECL_CONTEXT is important
5030 is for the FUNCTION_DECLs that have a gimple body (since
5031 the PARM_DECL will be used in the function's body). */
5032 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5033 DECL_CONTEXT (t) = decl;
5034 }
5035
5036 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5037 At this point, it is not needed anymore. */
5038 DECL_SAVED_TREE (decl) = NULL_TREE;
5039
5040 /* Clear the abstract origin if it refers to a method. Otherwise
5041 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5042 origin will not be output correctly. */
5043 if (DECL_ABSTRACT_ORIGIN (decl)
5044 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5045 && RECORD_OR_UNION_TYPE_P
5046 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5047 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5048
5049 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5050 DECL_VINDEX referring to itself into a vtable slot number as it
5051 should. This happens with functions that are copied and then forgotten
5052 about. Just clear it; it won't matter anymore. */
5053 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5054 DECL_VINDEX (decl) = NULL_TREE;
5055 }
5056 else if (TREE_CODE (decl) == VAR_DECL)
5057 {
5058 if ((DECL_EXTERNAL (decl)
5059 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5060 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5061 DECL_INITIAL (decl) = NULL_TREE;
5062 }
5063 else if (TREE_CODE (decl) == TYPE_DECL
5064 || TREE_CODE (decl) == FIELD_DECL)
5065 DECL_INITIAL (decl) = NULL_TREE;
5066 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5067 && DECL_INITIAL (decl)
5068 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5069 {
5070 /* Strip builtins from the translation-unit BLOCK. We still have targets
5071 without builtin_decl_explicit support, and builtins are shared
5072 nodes, so we can't use TREE_CHAIN in multiple lists. */
5073 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5074 while (*nextp)
5075 {
5076 tree var = *nextp;
5077 if (TREE_CODE (var) == FUNCTION_DECL
5078 && DECL_BUILT_IN (var))
5079 *nextp = TREE_CHAIN (var);
5080 else
5081 nextp = &TREE_CHAIN (var);
5082 }
5083 }
5084 }
5085
5086
5087 /* Data used when collecting DECLs and TYPEs for language data removal. */
5088
5089 struct free_lang_data_d
5090 {
5091 /* Worklist to avoid excessive recursion. */
5092 vec<tree> worklist;
5093
5094 /* Set of traversed objects. Used to avoid duplicate visits. */
5095 struct pointer_set_t *pset;
5096
5097 /* Array of symbols to process with free_lang_data_in_decl. */
5098 vec<tree> decls;
5099
5100 /* Array of types to process with free_lang_data_in_type. */
5101 vec<tree> types;
5102 };
5103
5104
5105 /* Save all language fields needed to generate proper debug information
5106 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5107
5108 static void
5109 save_debug_info_for_decl (tree t)
5110 {
5111 /*struct saved_debug_info_d *sdi;*/
5112
5113 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5114
5115 /* FIXME. Partial implementation for saving debug info removed. */
5116 }
5117
5118
5119 /* Save all language fields needed to generate proper debug information
5120 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5121
5122 static void
5123 save_debug_info_for_type (tree t)
5124 {
5125 /*struct saved_debug_info_d *sdi;*/
5126
5127 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5128
5129 /* FIXME. Partial implementation for saving debug info removed. */
5130 }
5131
5132
5133 /* Add type or decl T to one of the list of tree nodes that need their
5134 language data removed. The lists are held inside FLD. */
5135
5136 static void
5137 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5138 {
5139 if (DECL_P (t))
5140 {
5141 fld->decls.safe_push (t);
5142 if (debug_info_level > DINFO_LEVEL_TERSE)
5143 save_debug_info_for_decl (t);
5144 }
5145 else if (TYPE_P (t))
5146 {
5147 fld->types.safe_push (t);
5148 if (debug_info_level > DINFO_LEVEL_TERSE)
5149 save_debug_info_for_type (t);
5150 }
5151 else
5152 gcc_unreachable ();
5153 }
5154
5155 /* Push tree node T into FLD->WORKLIST. */
5156
5157 static inline void
5158 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5159 {
5160 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5161 fld->worklist.safe_push ((t));
5162 }
5163
5164
5165 /* Operand callback helper for free_lang_data_in_node. *TP is the
5166 subtree operand being considered. */
5167
5168 static tree
5169 find_decls_types_r (tree *tp, int *ws, void *data)
5170 {
5171 tree t = *tp;
5172 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5173
5174 if (TREE_CODE (t) == TREE_LIST)
5175 return NULL_TREE;
5176
5177 /* Language specific nodes will be removed, so there is no need
5178 to gather anything under them. */
5179 if (is_lang_specific (t))
5180 {
5181 *ws = 0;
5182 return NULL_TREE;
5183 }
5184
5185 if (DECL_P (t))
5186 {
5187 /* Note that walk_tree does not traverse every possible field in
5188 decls, so we have to do our own traversals here. */
5189 add_tree_to_fld_list (t, fld);
5190
5191 fld_worklist_push (DECL_NAME (t), fld);
5192 fld_worklist_push (DECL_CONTEXT (t), fld);
5193 fld_worklist_push (DECL_SIZE (t), fld);
5194 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5195
5196 /* We are going to remove everything under DECL_INITIAL for
5197 TYPE_DECLs. No point walking them. */
5198 if (TREE_CODE (t) != TYPE_DECL)
5199 fld_worklist_push (DECL_INITIAL (t), fld);
5200
5201 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5202 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5203
5204 if (TREE_CODE (t) == FUNCTION_DECL)
5205 {
5206 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5207 fld_worklist_push (DECL_RESULT (t), fld);
5208 }
5209 else if (TREE_CODE (t) == TYPE_DECL)
5210 {
5211 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5212 fld_worklist_push (DECL_VINDEX (t), fld);
5213 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5214 }
5215 else if (TREE_CODE (t) == FIELD_DECL)
5216 {
5217 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5218 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5219 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5220 fld_worklist_push (DECL_FCONTEXT (t), fld);
5221 }
5222 else if (TREE_CODE (t) == VAR_DECL)
5223 {
5224 fld_worklist_push (DECL_SECTION_NAME (t), fld);
5225 fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
5226 }
5227
5228 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5229 && DECL_HAS_VALUE_EXPR_P (t))
5230 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5231
5232 if (TREE_CODE (t) != FIELD_DECL
5233 && TREE_CODE (t) != TYPE_DECL)
5234 fld_worklist_push (TREE_CHAIN (t), fld);
5235 *ws = 0;
5236 }
5237 else if (TYPE_P (t))
5238 {
5239 /* Note that walk_tree does not traverse every possible field in
5240 types, so we have to do our own traversals here. */
5241 add_tree_to_fld_list (t, fld);
5242
5243 if (!RECORD_OR_UNION_TYPE_P (t))
5244 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5245 fld_worklist_push (TYPE_SIZE (t), fld);
5246 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5247 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5248 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5249 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5250 fld_worklist_push (TYPE_NAME (t), fld);
5251 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5252 them and thus do not want to reach unused pointer types
5253 this way. */
5254 if (!POINTER_TYPE_P (t))
5255 fld_worklist_push (TYPE_MINVAL (t), fld);
5256 if (!RECORD_OR_UNION_TYPE_P (t))
5257 fld_worklist_push (TYPE_MAXVAL (t), fld);
5258 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5259 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5260 do not want to reach unused variants this way. */
5261 if (TYPE_CONTEXT (t))
5262 {
5263 tree ctx = TYPE_CONTEXT (t);
5264 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5265 So push that instead. */
5266 while (ctx && TREE_CODE (ctx) == BLOCK)
5267 ctx = BLOCK_SUPERCONTEXT (ctx);
5268 fld_worklist_push (ctx, fld);
5269 }
5270 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5271 want to reach unused types this way. */
5272
5273 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5274 {
5275 unsigned i;
5276 tree tem;
5277 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5278 fld_worklist_push (TREE_TYPE (tem), fld);
5279 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5280 if (tem
5281 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5282 && TREE_CODE (tem) == TREE_LIST)
5283 do
5284 {
5285 fld_worklist_push (TREE_VALUE (tem), fld);
5286 tem = TREE_CHAIN (tem);
5287 }
5288 while (tem);
5289 }
5290 if (RECORD_OR_UNION_TYPE_P (t))
5291 {
5292 tree tem;
5293 /* Push all TYPE_FIELDS - interesting and non-interesting
5294 entries can be interleaved. */
5295 tem = TYPE_FIELDS (t);
5296 while (tem)
5297 {
5298 if (TREE_CODE (tem) == FIELD_DECL
5299 || TREE_CODE (tem) == TYPE_DECL)
5300 fld_worklist_push (tem, fld);
5301 tem = TREE_CHAIN (tem);
5302 }
5303 }
5304
5305 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5306 *ws = 0;
5307 }
5308 else if (TREE_CODE (t) == BLOCK)
5309 {
5310 tree tem;
5311 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5312 fld_worklist_push (tem, fld);
5313 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5314 fld_worklist_push (tem, fld);
5315 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5316 }
5317
5318 if (TREE_CODE (t) != IDENTIFIER_NODE
5319 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5320 fld_worklist_push (TREE_TYPE (t), fld);
5321
5322 return NULL_TREE;
5323 }
5324
5325
5326 /* Find decls and types in T. */
5327
5328 static void
5329 find_decls_types (tree t, struct free_lang_data_d *fld)
5330 {
5331 while (1)
5332 {
5333 if (!pointer_set_contains (fld->pset, t))
5334 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5335 if (fld->worklist.is_empty ())
5336 break;
5337 t = fld->worklist.pop ();
5338 }
5339 }
5340
5341 /* Translate all the types in LIST with the corresponding runtime
5342 types. */
5343
5344 static tree
5345 get_eh_types_for_runtime (tree list)
5346 {
5347 tree head, prev;
5348
5349 if (list == NULL_TREE)
5350 return NULL_TREE;
5351
5352 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5353 prev = head;
5354 list = TREE_CHAIN (list);
5355 while (list)
5356 {
5357 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5358 TREE_CHAIN (prev) = n;
5359 prev = TREE_CHAIN (prev);
5360 list = TREE_CHAIN (list);
5361 }
5362
5363 return head;
5364 }
5365
5366
5367 /* Find decls and types referenced in EH region R and store them in
5368 FLD->DECLS and FLD->TYPES. */
5369
5370 static void
5371 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5372 {
5373 switch (r->type)
5374 {
5375 case ERT_CLEANUP:
5376 break;
5377
5378 case ERT_TRY:
5379 {
5380 eh_catch c;
5381
5382 /* The types referenced in each catch must first be changed to the
5383 EH types used at runtime. This removes references to FE types
5384 in the region. */
5385 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5386 {
5387 c->type_list = get_eh_types_for_runtime (c->type_list);
5388 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5389 }
5390 }
5391 break;
5392
5393 case ERT_ALLOWED_EXCEPTIONS:
5394 r->u.allowed.type_list
5395 = get_eh_types_for_runtime (r->u.allowed.type_list);
5396 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5397 break;
5398
5399 case ERT_MUST_NOT_THROW:
5400 walk_tree (&r->u.must_not_throw.failure_decl,
5401 find_decls_types_r, fld, fld->pset);
5402 break;
5403 }
5404 }
5405
5406
5407 /* Find decls and types referenced in cgraph node N and store them in
5408 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5409 look for *every* kind of DECL and TYPE node reachable from N,
5410 including those embedded inside types and decls (i.e., TYPE_DECLs,
5411 NAMESPACE_DECLs, etc.). */
5412
5413 static void
5414 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5415 {
5416 basic_block bb;
5417 struct function *fn;
5418 unsigned ix;
5419 tree t;
5420
5421 find_decls_types (n->decl, fld);
5422
5423 if (!gimple_has_body_p (n->decl))
5424 return;
5425
5426 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5427
5428 fn = DECL_STRUCT_FUNCTION (n->decl);
5429
5430 /* Traverse locals. */
5431 FOR_EACH_LOCAL_DECL (fn, ix, t)
5432 find_decls_types (t, fld);
5433
5434 /* Traverse EH regions in FN. */
5435 {
5436 eh_region r;
5437 FOR_ALL_EH_REGION_FN (r, fn)
5438 find_decls_types_in_eh_region (r, fld);
5439 }
5440
5441 /* Traverse every statement in FN. */
5442 FOR_EACH_BB_FN (bb, fn)
5443 {
5444 gimple_stmt_iterator si;
5445 unsigned i;
5446
5447 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5448 {
5449 gimple phi = gsi_stmt (si);
5450
5451 for (i = 0; i < gimple_phi_num_args (phi); i++)
5452 {
5453 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5454 find_decls_types (*arg_p, fld);
5455 }
5456 }
5457
5458 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5459 {
5460 gimple stmt = gsi_stmt (si);
5461
5462 if (is_gimple_call (stmt))
5463 find_decls_types (gimple_call_fntype (stmt), fld);
5464
5465 for (i = 0; i < gimple_num_ops (stmt); i++)
5466 {
5467 tree arg = gimple_op (stmt, i);
5468 find_decls_types (arg, fld);
5469 }
5470 }
5471 }
5472 }
5473
5474
5475 /* Find decls and types referenced in varpool node N and store them in
5476 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5477 look for *every* kind of DECL and TYPE node reachable from N,
5478 including those embedded inside types and decls (i.e., TYPE_DECLs,
5479 NAMESPACE_DECLs, etc.). */
5480
5481 static void
5482 find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
5483 {
5484 find_decls_types (v->decl, fld);
5485 }
5486
5487 /* If T needs an assembler name, have one created for it. */
5488
5489 void
5490 assign_assembler_name_if_neeeded (tree t)
5491 {
5492 if (need_assembler_name_p (t))
5493 {
5494 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5495 diagnostics that use input_location to show locus
5496 information. The problem here is that, at this point,
5497 input_location is generally anchored to the end of the file
5498 (since the parser is long gone), so we don't have a good
5499 position to pin it to.
5500
5501 To alleviate this problem, this uses the location of T's
5502 declaration. Examples of this are
5503 testsuite/g++.dg/template/cond2.C and
5504 testsuite/g++.dg/template/pr35240.C. */
5505 location_t saved_location = input_location;
5506 input_location = DECL_SOURCE_LOCATION (t);
5507
5508 decl_assembler_name (t);
5509
5510 input_location = saved_location;
5511 }
5512 }
5513
5514
5515 /* Free language specific information for every operand and expression
5516 in every node of the call graph. This process operates in three stages:
5517
5518 1- Every callgraph node and varpool node is traversed looking for
5519 decls and types embedded in them. This is a more exhaustive
5520 search than that done by find_referenced_vars, because it will
5521 also collect individual fields, decls embedded in types, etc.
5522
5523 2- All the decls found are sent to free_lang_data_in_decl.
5524
5525 3- All the types found are sent to free_lang_data_in_type.
5526
5527 The ordering between decls and types is important because
5528 free_lang_data_in_decl sets assembler names, which includes
5529 mangling. So types cannot be freed up until assembler names have
5530 been set up. */
5531
5532 static void
5533 free_lang_data_in_cgraph (void)
5534 {
5535 struct cgraph_node *n;
5536 struct varpool_node *v;
5537 struct free_lang_data_d fld;
5538 tree t;
5539 unsigned i;
5540 alias_pair *p;
5541
5542 /* Initialize sets and arrays to store referenced decls and types. */
5543 fld.pset = pointer_set_create ();
5544 fld.worklist.create (0);
5545 fld.decls.create (100);
5546 fld.types.create (100);
5547
5548 /* Find decls and types in the body of every function in the callgraph. */
5549 FOR_EACH_FUNCTION (n)
5550 find_decls_types_in_node (n, &fld);
5551
5552 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5553 find_decls_types (p->decl, &fld);
5554
5555 /* Find decls and types in every varpool symbol. */
5556 FOR_EACH_VARIABLE (v)
5557 find_decls_types_in_var (v, &fld);
5558
5559 /* Set the assembler name on every decl found. We need to do this
5560 now because free_lang_data_in_decl will invalidate data needed
5561 for mangling. This breaks mangling on interdependent decls. */
5562 FOR_EACH_VEC_ELT (fld.decls, i, t)
5563 assign_assembler_name_if_neeeded (t);
5564
5565 /* Traverse every decl found freeing its language data. */
5566 FOR_EACH_VEC_ELT (fld.decls, i, t)
5567 free_lang_data_in_decl (t);
5568
5569 /* Traverse every type found freeing its language data. */
5570 FOR_EACH_VEC_ELT (fld.types, i, t)
5571 free_lang_data_in_type (t);
5572
5573 pointer_set_destroy (fld.pset);
5574 fld.worklist.release ();
5575 fld.decls.release ();
5576 fld.types.release ();
5577 }
5578
5579
5580 /* Free resources that are used by the FE but are not needed once it is done. */
5581
5582 static unsigned
5583 free_lang_data (void)
5584 {
5585 unsigned i;
5586
5587 /* If we are the LTO frontend we have freed lang-specific data already. */
5588 if (in_lto_p
5589 || !flag_generate_lto)
5590 return 0;
5591
5592 /* Allocate and assign alias sets to the standard integer types
5593 while the slots are still in the way the frontends generated them. */
5594 for (i = 0; i < itk_none; ++i)
5595 if (integer_types[i])
5596 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5597
5598 /* Traverse the IL resetting language specific information for
5599 operands, expressions, etc. */
5600 free_lang_data_in_cgraph ();
5601
5602 /* Create gimple variants for common types. */
5603 ptrdiff_type_node = integer_type_node;
5604 fileptr_type_node = ptr_type_node;
5605
5606 /* Reset some langhooks. Do not reset types_compatible_p, it may
5607 still be used indirectly via the get_alias_set langhook. */
5608 lang_hooks.dwarf_name = lhd_dwarf_name;
5609 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5610 /* We do not want the default decl_assembler_name implementation;
5611 once everything is fixed we want a wrapper around it that
5612 asserts all non-local symbols already got their assembler
5613 name and that only produces assembler names for local symbols. Or,
5614 better, make sure we never call decl_assembler_name on local symbols
5615 and devise a separate, middle-end private scheme for it. */
5616
5617 /* Reset diagnostic machinery. */
5618 tree_diagnostics_defaults (global_dc);
5619
5620 return 0;
5621 }
5622
5623
5624 namespace {
5625
5626 const pass_data pass_data_ipa_free_lang_data =
5627 {
5628 SIMPLE_IPA_PASS, /* type */
5629 "*free_lang_data", /* name */
5630 OPTGROUP_NONE, /* optinfo_flags */
5631 false, /* has_gate */
5632 true, /* has_execute */
5633 TV_IPA_FREE_LANG_DATA, /* tv_id */
5634 0, /* properties_required */
5635 0, /* properties_provided */
5636 0, /* properties_destroyed */
5637 0, /* todo_flags_start */
5638 0, /* todo_flags_finish */
5639 };
5640
5641 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5642 {
5643 public:
5644 pass_ipa_free_lang_data (gcc::context *ctxt)
5645 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5646 {}
5647
5648 /* opt_pass methods: */
5649 unsigned int execute () { return free_lang_data (); }
5650
5651 }; // class pass_ipa_free_lang_data
5652
5653 } // anon namespace
5654
5655 simple_ipa_opt_pass *
5656 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5657 {
5658 return new pass_ipa_free_lang_data (ctxt);
5659 }
5660
5661 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5662 ATTR_NAME. Also used internally by remove_attribute(). */
5663 bool
5664 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5665 {
5666 size_t ident_len = IDENTIFIER_LENGTH (ident);
5667
5668 if (ident_len == attr_len)
5669 {
5670 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5671 return true;
5672 }
5673 else if (ident_len == attr_len + 4)
5674 {
5675 /* There is the possibility that ATTR is 'text' and IDENT is
5676 '__text__'. */
5677 const char *p = IDENTIFIER_POINTER (ident);
5678 if (p[0] == '_' && p[1] == '_'
5679 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5680 && strncmp (attr_name, p + 2, attr_len) == 0)
5681 return true;
5682 }
5683
5684 return false;
5685 }
5686
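/* For illustration only: both spellings of an attribute name match here,
   so the hypothetical calls

       private_is_attribute_p ("packed", 6, get_identifier ("packed"));
       private_is_attribute_p ("packed", 6, get_identifier ("__packed__"));

   both return true.  Callers must not pass the underscored form as
   ATTR_NAME (see the assert in remove_attribute below).  */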
5687 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5688 of ATTR_NAME, and LIST is not NULL_TREE. */
5689 tree
5690 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5691 {
5692 while (list)
5693 {
5694 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5695
5696 if (ident_len == attr_len)
5697 {
5698 if (!strcmp (attr_name,
5699 IDENTIFIER_POINTER (get_attribute_name (list))))
5700 break;
5701 }
5702 /* TODO: If we made sure that attributes were stored in the
5703 canonical form without '__...__' (i.e., as in 'text' as opposed
5704 to '__text__') then we could avoid the following case. */
5705 else if (ident_len == attr_len + 4)
5706 {
5707 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5708 if (p[0] == '_' && p[1] == '_'
5709 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5710 && strncmp (attr_name, p + 2, attr_len) == 0)
5711 break;
5712 }
5713 list = TREE_CHAIN (list);
5714 }
5715
5716 return list;
5717 }
5718
5719 /* A variant of lookup_attribute() that can be used with an identifier
5720 as the first argument, and where the identifier can be either
5721 'text' or '__text__'.
5722
5723 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5724 return a pointer to the attribute's list element if the attribute
5725 is part of the list, or NULL_TREE if not found. If the attribute
5726 appears more than once, this only returns the first occurrence; the
5727 TREE_CHAIN of the return value should be passed back in if further
5728 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5729 can be in the form 'text' or '__text__'. */
5730 static tree
5731 lookup_ident_attribute (tree attr_identifier, tree list)
5732 {
5733 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5734
5735 while (list)
5736 {
5737 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5738 == IDENTIFIER_NODE);
5739
5740 /* Identifiers can be compared directly for equality. */
5741 if (attr_identifier == get_attribute_name (list))
5742 break;
5743
5744 /* If they are not equal, they may still be one in the form
5745 'text' while the other one is in the form '__text__'. TODO:
5746 If we were storing attributes in normalized 'text' form, then
5747 this could all go away and we could take full advantage of
5748 the fact that we're comparing identifiers. :-) */
5749 {
5750 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5751 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5752
5753 if (ident_len == attr_len + 4)
5754 {
5755 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5756 const char *q = IDENTIFIER_POINTER (attr_identifier);
5757 if (p[0] == '_' && p[1] == '_'
5758 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5759 && strncmp (q, p + 2, attr_len) == 0)
5760 break;
5761 }
5762 else if (ident_len + 4 == attr_len)
5763 {
5764 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5765 const char *q = IDENTIFIER_POINTER (attr_identifier);
5766 if (q[0] == '_' && q[1] == '_'
5767 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5768 && strncmp (q + 2, p, ident_len) == 0)
5769 break;
5770 }
5771 }
5772 list = TREE_CHAIN (list);
5773 }
5774
5775 return list;
5776 }
5777
5778 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5779 modified list. */
5780
5781 tree
5782 remove_attribute (const char *attr_name, tree list)
5783 {
5784 tree *p;
5785 size_t attr_len = strlen (attr_name);
5786
5787 gcc_checking_assert (attr_name[0] != '_');
5788
5789 for (p = &list; *p; )
5790 {
5791 tree l = *p;
5792 /* TODO: If we were storing attributes in normalized form, here
5793 we could use a simple strcmp(). */
5794 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5795 *p = TREE_CHAIN (l);
5796 else
5797 p = &TREE_CHAIN (l);
5798 }
5799
5800 return list;
5801 }
5802
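/* For illustration only: stripping a hypothetical "deprecated" attribute
   from a decl would look like

       DECL_ATTRIBUTES (decl)
         = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   The head of the list may change, so the result must be stored back.  */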
5803 /* Return an attribute list that is the union of a1 and a2. */
5804
5805 tree
5806 merge_attributes (tree a1, tree a2)
5807 {
5808 tree attributes;
5809
5810 /* Either one unset? Take the set one. */
5811
5812 if ((attributes = a1) == 0)
5813 attributes = a2;
5814
5815 /* One that completely contains the other? Take it. */
5816
5817 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5818 {
5819 if (attribute_list_contained (a2, a1))
5820 attributes = a2;
5821 else
5822 {
5823 /* Pick the longest list, and hang on the other list. */
5824
5825 if (list_length (a1) < list_length (a2))
5826 attributes = a2, a2 = a1;
5827
5828 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5829 {
5830 tree a;
5831 for (a = lookup_ident_attribute (get_attribute_name (a2),
5832 attributes);
5833 a != NULL_TREE && !attribute_value_equal (a, a2);
5834 a = lookup_ident_attribute (get_attribute_name (a2),
5835 TREE_CHAIN (a)))
5836 ;
5837 if (a == NULL_TREE)
5838 {
5839 a1 = copy_node (a2);
5840 TREE_CHAIN (a1) = attributes;
5841 attributes = a1;
5842 }
5843 }
5844 }
5845 }
5846 return attributes;
5847 }
5848
5849 /* Given types T1 and T2, merge their attributes and return
5850 the result. */
5851
5852 tree
5853 merge_type_attributes (tree t1, tree t2)
5854 {
5855 return merge_attributes (TYPE_ATTRIBUTES (t1),
5856 TYPE_ATTRIBUTES (t2));
5857 }
5858
5859 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5860 the result. */
5861
5862 tree
5863 merge_decl_attributes (tree olddecl, tree newdecl)
5864 {
5865 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5866 DECL_ATTRIBUTES (newdecl));
5867 }
5868
5869 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5870
5871 /* Specialization of merge_decl_attributes for various Windows targets.
5872
5873 This handles the following situation:
5874
5875 __declspec (dllimport) int foo;
5876 int foo;
5877
5878 The second instance of `foo' nullifies the dllimport. */
5879
5880 tree
5881 merge_dllimport_decl_attributes (tree old, tree new_tree)
5882 {
5883 tree a;
5884 int delete_dllimport_p = 1;
5885
5886 /* What we need to do here is remove dllimport from `old' if it
5887 doesn't appear in `new'. dllimport behaves like extern: if a
5888 declaration is marked dllimport and a definition appears later,
5889 then the object is not dllimport'd. We also remove a `new'
5890 dllimport if the old list contains dllexport: dllexport always
5891 overrides dllimport, regardless of the order of declaration. */
5892 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5893 delete_dllimport_p = 0;
5894 else if (DECL_DLLIMPORT_P (new_tree)
5895 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5896 {
5897 DECL_DLLIMPORT_P (new_tree) = 0;
5898 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5899 "dllimport ignored", new_tree);
5900 }
5901 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5902 {
5903 /* Warn about overriding a symbol that has already been used, e.g.:
5904 extern int __attribute__ ((dllimport)) foo;
5905 int* bar () {return &foo;}
5906 int foo;
5907 */
5908 if (TREE_USED (old))
5909 {
5910 warning (0, "%q+D redeclared without dllimport attribute "
5911 "after being referenced with dll linkage", new_tree);
5912 /* If we have used a variable's address with dllimport linkage,
5913 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5914 decl may already have had TREE_CONSTANT computed.
5915 We still remove the attribute so that assembler code refers
5916 to '&foo' rather than '_imp__foo'. */
5917 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5918 DECL_DLLIMPORT_P (new_tree) = 1;
5919 }
5920
5921 /* Let an inline definition silently override the external reference,
5922 but otherwise warn about attribute inconsistency. */
5923 else if (TREE_CODE (new_tree) == VAR_DECL
5924 || !DECL_DECLARED_INLINE_P (new_tree))
5925 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
5926 "previous dllimport ignored", new_tree);
5927 }
5928 else
5929 delete_dllimport_p = 0;
5930
5931 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
5932
5933 if (delete_dllimport_p)
5934 a = remove_attribute ("dllimport", a);
5935
5936 return a;
5937 }
5938
5939 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
5940 struct attribute_spec.handler. */
5941
5942 tree
5943 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
5944 bool *no_add_attrs)
5945 {
5946 tree node = *pnode;
5947 bool is_dllimport;
5948
5949 /* These attributes may apply to structure and union types being created,
5950 but otherwise should pass to the declaration involved. */
5951 if (!DECL_P (node))
5952 {
5953 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
5954 | (int) ATTR_FLAG_ARRAY_NEXT))
5955 {
5956 *no_add_attrs = true;
5957 return tree_cons (name, args, NULL_TREE);
5958 }
5959 if (TREE_CODE (node) == RECORD_TYPE
5960 || TREE_CODE (node) == UNION_TYPE)
5961 {
5962 node = TYPE_NAME (node);
5963 if (!node)
5964 return NULL_TREE;
5965 }
5966 else
5967 {
5968 warning (OPT_Wattributes, "%qE attribute ignored",
5969 name);
5970 *no_add_attrs = true;
5971 return NULL_TREE;
5972 }
5973 }
5974
5975 if (TREE_CODE (node) != FUNCTION_DECL
5976 && TREE_CODE (node) != VAR_DECL
5977 && TREE_CODE (node) != TYPE_DECL)
5978 {
5979 *no_add_attrs = true;
5980 warning (OPT_Wattributes, "%qE attribute ignored",
5981 name);
5982 return NULL_TREE;
5983 }
5984
5985 if (TREE_CODE (node) == TYPE_DECL
5986 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
5987 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
5988 {
5989 *no_add_attrs = true;
5990 warning (OPT_Wattributes, "%qE attribute ignored",
5991 name);
5992 return NULL_TREE;
5993 }
5994
5995 is_dllimport = is_attribute_p ("dllimport", name);
5996
5997 /* Report error on dllimport ambiguities seen now before they cause
5998 any damage. */
5999 if (is_dllimport)
6000 {
6001 /* Honor any target-specific overrides. */
6002 if (!targetm.valid_dllimport_attribute_p (node))
6003 *no_add_attrs = true;
6004
6005 else if (TREE_CODE (node) == FUNCTION_DECL
6006 && DECL_DECLARED_INLINE_P (node))
6007 {
6008 warning (OPT_Wattributes, "inline function %q+D declared as "
6009 "dllimport: attribute ignored", node);
6010 *no_add_attrs = true;
6011 }
6012 /* Like MS, treat definition of dllimported variables and
6013 non-inlined functions on declaration as syntax errors. */
6014 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6015 {
6016 error ("function %q+D definition is marked dllimport", node);
6017 *no_add_attrs = true;
6018 }
6019
6020 else if (TREE_CODE (node) == VAR_DECL)
6021 {
6022 if (DECL_INITIAL (node))
6023 {
6024 error ("variable %q+D definition is marked dllimport",
6025 node);
6026 *no_add_attrs = true;
6027 }
6028
6029 /* `extern' needn't be specified with dllimport.
6030 Specify `extern' now and hope for the best. Sigh. */
6031 DECL_EXTERNAL (node) = 1;
6032 /* Also, implicitly give global scope to dllimport'd variables
6033 declared within a function, unless they are declared static. */
6034 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6035 TREE_PUBLIC (node) = 1;
6036 }
6037
6038 if (*no_add_attrs == false)
6039 DECL_DLLIMPORT_P (node) = 1;
6040 }
6041 else if (TREE_CODE (node) == FUNCTION_DECL
6042 && DECL_DECLARED_INLINE_P (node)
6043 && flag_keep_inline_dllexport)
6044 /* An exported function, even if inline, must be emitted. */
6045 DECL_EXTERNAL (node) = 0;
6046
6047 /* Report error if symbol is not accessible at global scope. */
6048 if (!TREE_PUBLIC (node)
6049 && (TREE_CODE (node) == VAR_DECL
6050 || TREE_CODE (node) == FUNCTION_DECL))
6051 {
6052 error ("external linkage required for symbol %q+D because of "
6053 "%qE attribute", node, name);
6054 *no_add_attrs = true;
6055 }
6056
6057 /* A dllexport'd entity must have default visibility so that other
6058 program units (shared libraries or the main executable) can see
6059 it. A dllimport'd entity must have default visibility so that
6060 the linker knows that undefined references within this program
6061 unit can be resolved by the dynamic linker. */
6062 if (!*no_add_attrs)
6063 {
6064 if (DECL_VISIBILITY_SPECIFIED (node)
6065 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6066 error ("%qE implies default visibility, but %qD has already "
6067 "been declared with a different visibility",
6068 name, node);
6069 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6070 DECL_VISIBILITY_SPECIFIED (node) = 1;
6071 }
6072
6073 return NULL_TREE;
6074 }
6075
6076 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6077 \f
6078 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6079 of the various TYPE_QUAL values. */
6080
6081 static void
6082 set_type_quals (tree type, int type_quals)
6083 {
6084 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6085 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6086 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6087 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6088 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6089 }
6090
6091 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6092
6093 bool
6094 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6095 {
6096 return (TYPE_QUALS (cand) == type_quals
6097 && TYPE_NAME (cand) == TYPE_NAME (base)
6098 /* Apparently this is needed for Objective-C. */
6099 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6100 /* Check alignment. */
6101 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6102 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6103 TYPE_ATTRIBUTES (base)));
6104 }
6105
6106 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6107
6108 static bool
6109 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6110 {
6111 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6112 && TYPE_NAME (cand) == TYPE_NAME (base)
6113 /* Apparently this is needed for Objective-C. */
6114 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6115 /* Check alignment. */
6116 && TYPE_ALIGN (cand) == align
6117 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6118 TYPE_ATTRIBUTES (base)));
6119 }
6120
6121 /* This function checks to see if TYPE matches the size of one of the
6122 built-in atomic types, and returns that core atomic type. */
6123
6124 static tree
6125 find_atomic_core_type (tree type)
6126 {
6127 tree base_atomic_type;
6128
6129 /* Only handle complete types. */
6130 if (TYPE_SIZE (type) == NULL_TREE)
6131 return NULL_TREE;
6132
6133 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6134 switch (type_size)
6135 {
6136 case 8:
6137 base_atomic_type = atomicQI_type_node;
6138 break;
6139
6140 case 16:
6141 base_atomic_type = atomicHI_type_node;
6142 break;
6143
6144 case 32:
6145 base_atomic_type = atomicSI_type_node;
6146 break;
6147
6148 case 64:
6149 base_atomic_type = atomicDI_type_node;
6150 break;
6151
6152 case 128:
6153 base_atomic_type = atomicTI_type_node;
6154 break;
6155
6156 default:
6157 base_atomic_type = NULL_TREE;
6158 }
6159
6160 return base_atomic_type;
6161 }
6162
6163 /* Return a version of the TYPE, qualified as indicated by the
6164 TYPE_QUALS, if one exists. If no qualified version exists yet,
6165 return NULL_TREE. */
6166
6167 tree
6168 get_qualified_type (tree type, int type_quals)
6169 {
6170 tree t;
6171
6172 if (TYPE_QUALS (type) == type_quals)
6173 return type;
6174
6175 /* Search the chain of variants to see if there is already one there just
6176 like the one we need to have. If so, use that existing one. We must
6177 preserve the TYPE_NAME, since there is code that depends on this. */
6178 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6179 if (check_qualified_type (t, type, type_quals))
6180 return t;
6181
6182 return NULL_TREE;
6183 }
6184
6185 /* Like get_qualified_type, but creates the type if it does not
6186 exist. This function never returns NULL_TREE. */
6187
6188 tree
6189 build_qualified_type (tree type, int type_quals)
6190 {
6191 tree t;
6192
6193 /* See if we already have the appropriate qualified variant. */
6194 t = get_qualified_type (type, type_quals);
6195
6196 /* If not, build it. */
6197 if (!t)
6198 {
6199 t = build_variant_type_copy (type);
6200 set_type_quals (t, type_quals);
6201
6202 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6203 {
6204 /* See if this object can map to a basic atomic type. */
6205 tree atomic_type = find_atomic_core_type (type);
6206 if (atomic_type)
6207 {
6208 /* Ensure the alignment of this type is compatible with
6209 the required alignment of the atomic type. */
6210 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6211 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6212 }
6213 }
6214
6215 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6216 /* Propagate structural equality. */
6217 SET_TYPE_STRUCTURAL_EQUALITY (t);
6218 else if (TYPE_CANONICAL (type) != type)
6219 /* Build the underlying canonical type, since it is different
6220 from TYPE. */
6221 TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
6222 type_quals);
6223 else
6224 /* T is its own canonical type. */
6225 TYPE_CANONICAL (t) = t;
6226
6227 }
6228
6229 return t;
6230 }
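
/* Illustrative usage sketch (editorial, not part of GCC): obtain the
   "const volatile int" variant of integer_type_node.  Qualifiers are
   passed as a bitmask of TYPE_QUAL_* values; the helper name is
   hypothetical.  */

static tree
example_const_volatile_int (void)
{
  return build_qualified_type (integer_type_node,
			       TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
}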
6231
6232 /* Create a variant of type T with alignment ALIGN. */
6233
6234 tree
6235 build_aligned_type (tree type, unsigned int align)
6236 {
6237 tree t;
6238
6239 if (TYPE_PACKED (type)
6240 || TYPE_ALIGN (type) == align)
6241 return type;
6242
6243 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6244 if (check_aligned_type (t, type, align))
6245 return t;
6246
6247 t = build_variant_type_copy (type);
6248 TYPE_ALIGN (t) = align;
6249
6250 return t;
6251 }
6252
6253 /* Create a new distinct copy of TYPE. The new type is made its own
6254 MAIN_VARIANT. If TYPE requires structural equality checks, the
6255 resulting type requires structural equality checks; otherwise, its
6256 TYPE_CANONICAL points to itself. */
6257
6258 tree
6259 build_distinct_type_copy (tree type)
6260 {
6261 tree t = copy_node (type);
6262
6263 TYPE_POINTER_TO (t) = 0;
6264 TYPE_REFERENCE_TO (t) = 0;
6265
6266 /* Set the canonical type either to a new equivalence class, or
6267 propagate the need for structural equality checks. */
6268 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6269 SET_TYPE_STRUCTURAL_EQUALITY (t);
6270 else
6271 TYPE_CANONICAL (t) = t;
6272
6273 /* Make it its own variant. */
6274 TYPE_MAIN_VARIANT (t) = t;
6275 TYPE_NEXT_VARIANT (t) = 0;
6276
6277 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6278 whose TREE_TYPE is not t. This can also happen in the Ada
6279 frontend when using subtypes. */
6280
6281 return t;
6282 }
6283
6284 /* Create a new variant of TYPE, equivalent but distinct. This is so
6285 the caller can modify it. TYPE_CANONICAL for the return type will
6286 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6287 are considered equal by the language itself (or that both types
6288 require structural equality checks). */
6289
6290 tree
6291 build_variant_type_copy (tree type)
6292 {
6293 tree t, m = TYPE_MAIN_VARIANT (type);
6294
6295 t = build_distinct_type_copy (type);
6296
6297 /* Since we're building a variant, assume that it is a non-semantic
6298 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6299 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6300
6301 /* Add the new type to the chain of variants of TYPE. */
6302 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6303 TYPE_NEXT_VARIANT (m) = t;
6304 TYPE_MAIN_VARIANT (t) = m;
6305
6306 return t;
6307 }
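
/* Illustrative sketch (editorial, not part of GCC) contrasting the two
   copies above: a variant copy stays in TYPE's variant chain, whereas a
   distinct copy becomes its own main variant.  The helper name is
   hypothetical.  */

static void
example_type_copies (tree type)
{
  tree variant = build_variant_type_copy (type);
  tree distinct = build_distinct_type_copy (type);

  /* The variant still belongs to TYPE's family ...  */
  gcc_assert (TYPE_MAIN_VARIANT (variant) == TYPE_MAIN_VARIANT (type));
  /* ... while the distinct copy starts a new one.  */
  gcc_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
}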
6308 \f
6309 /* Return true if the from trees in both tree maps are equal. */
6310
6311 int
6312 tree_map_base_eq (const void *va, const void *vb)
6313 {
6314 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6315 *const b = (const struct tree_map_base *) vb;
6316 return (a->from == b->from);
6317 }
6318
6319 /* Hash a from tree in a tree_map_base. */
6320
6321 unsigned int
6322 tree_map_base_hash (const void *item)
6323 {
6324 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6325 }
6326
6327 /* Return true if this tree map structure is marked for garbage collection
6328 purposes. We simply return true if the from tree is marked, so that this
6329 structure goes away when the from tree goes away. */
6330
6331 int
6332 tree_map_base_marked_p (const void *p)
6333 {
6334 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6335 }
6336
6337 /* Hash a from tree in a tree_map. */
6338
6339 unsigned int
6340 tree_map_hash (const void *item)
6341 {
6342 return (((const struct tree_map *) item)->hash);
6343 }
6344
6345 /* Hash a from tree in a tree_decl_map. */
6346
6347 unsigned int
6348 tree_decl_map_hash (const void *item)
6349 {
6350 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6351 }
6352
6353 /* Return the initialization priority for DECL. */
6354
6355 priority_type
6356 decl_init_priority_lookup (tree decl)
6357 {
6358 struct tree_priority_map *h;
6359 struct tree_map_base in;
6360
6361 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6362 in.from = decl;
6363 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6364 return h ? h->init : DEFAULT_INIT_PRIORITY;
6365 }
6366
6367 /* Return the finalization priority for DECL. */
6368
6369 priority_type
6370 decl_fini_priority_lookup (tree decl)
6371 {
6372 struct tree_priority_map *h;
6373 struct tree_map_base in;
6374
6375 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6376 in.from = decl;
6377 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6378 return h ? h->fini : DEFAULT_INIT_PRIORITY;
6379 }
6380
6381 /* Return the initialization and finalization priority information for
6382 DECL. If there is no previous priority information, a freshly
6383 allocated structure is returned. */
6384
6385 static struct tree_priority_map *
6386 decl_priority_info (tree decl)
6387 {
6388 struct tree_priority_map in;
6389 struct tree_priority_map *h;
6390 void **loc;
6391
6392 in.base.from = decl;
6393 loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
6394 h = (struct tree_priority_map *) *loc;
6395 if (!h)
6396 {
6397 h = ggc_alloc_cleared_tree_priority_map ();
6398 *loc = h;
6399 h->base.from = decl;
6400 h->init = DEFAULT_INIT_PRIORITY;
6401 h->fini = DEFAULT_INIT_PRIORITY;
6402 }
6403
6404 return h;
6405 }
6406
6407 /* Set the initialization priority for DECL to PRIORITY. */
6408
6409 void
6410 decl_init_priority_insert (tree decl, priority_type priority)
6411 {
6412 struct tree_priority_map *h;
6413
6414 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6415 if (priority == DEFAULT_INIT_PRIORITY)
6416 return;
6417 h = decl_priority_info (decl);
6418 h->init = priority;
6419 }
6420
6421 /* Set the finalization priority for DECL to PRIORITY. */
6422
6423 void
6424 decl_fini_priority_insert (tree decl, priority_type priority)
6425 {
6426 struct tree_priority_map *h;
6427
6428 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6429 if (priority == DEFAULT_INIT_PRIORITY)
6430 return;
6431 h = decl_priority_info (decl);
6432 h->fini = priority;
6433 }
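
/* Illustrative usage sketch (editorial, not part of GCC): record and
   read back a constructor priority for FNDECL, roughly what a front end
   does for __attribute__ ((constructor (101))).  The helper name and
   the priority value are hypothetical.  */

static void
example_set_ctor_priority (tree fndecl)
{
  decl_init_priority_insert (fndecl, 101);
  gcc_assert (decl_init_priority_lookup (fndecl) == 101);
}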
6434
6435 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6436
6437 static void
6438 print_debug_expr_statistics (void)
6439 {
6440 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6441 (long) htab_size (debug_expr_for_decl),
6442 (long) htab_elements (debug_expr_for_decl),
6443 htab_collisions (debug_expr_for_decl));
6444 }
6445
6446 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6447
6448 static void
6449 print_value_expr_statistics (void)
6450 {
6451 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6452 (long) htab_size (value_expr_for_decl),
6453 (long) htab_elements (value_expr_for_decl),
6454 htab_collisions (value_expr_for_decl));
6455 }
6456
6457 /* Look up a debug expression for FROM, and return it if we find one. */
6458
6459 tree
6460 decl_debug_expr_lookup (tree from)
6461 {
6462 struct tree_decl_map *h, in;
6463 in.base.from = from;
6464
6465 h = (struct tree_decl_map *)
6466 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6467 if (h)
6468 return h->to;
6469 return NULL_TREE;
6470 }
6471
6472 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6473
6474 void
6475 decl_debug_expr_insert (tree from, tree to)
6476 {
6477 struct tree_decl_map *h;
6478 void **loc;
6479
6480 h = ggc_alloc_tree_decl_map ();
6481 h->base.from = from;
6482 h->to = to;
6483 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6484 INSERT);
6485 *(struct tree_decl_map **) loc = h;
6486 }
6487
6488 /* Look up a value expression for FROM, and return it if we find one. */
6489
6490 tree
6491 decl_value_expr_lookup (tree from)
6492 {
6493 struct tree_decl_map *h, in;
6494 in.base.from = from;
6495
6496 h = (struct tree_decl_map *)
6497 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6498 if (h)
6499 return h->to;
6500 return NULL_TREE;
6501 }
6502
6503 /* Insert a mapping FROM->TO in the value expression hashtable. */
6504
6505 void
6506 decl_value_expr_insert (tree from, tree to)
6507 {
6508 struct tree_decl_map *h;
6509 void **loc;
6510
6511 h = ggc_alloc_tree_decl_map ();
6512 h->base.from = from;
6513 h->to = to;
6514 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6515 INSERT);
6516 *(struct tree_decl_map **) loc = h;
6517 }
6518
6519 /* Look up a vector of debug arguments for FROM, and return it if we
6520 find one. */
6521
6522 vec<tree, va_gc> **
6523 decl_debug_args_lookup (tree from)
6524 {
6525 struct tree_vec_map *h, in;
6526
6527 if (!DECL_HAS_DEBUG_ARGS_P (from))
6528 return NULL;
6529 gcc_checking_assert (debug_args_for_decl != NULL);
6530 in.base.from = from;
6531 h = (struct tree_vec_map *)
6532 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6533 if (h)
6534 return &h->to;
6535 return NULL;
6536 }
6537
6538 /* Insert a mapping FROM->empty vector of debug arguments in the
6539 debug arguments hashtable. */
6540
6541 vec<tree, va_gc> **
6542 decl_debug_args_insert (tree from)
6543 {
6544 struct tree_vec_map *h;
6545 void **loc;
6546
6547 if (DECL_HAS_DEBUG_ARGS_P (from))
6548 return decl_debug_args_lookup (from);
6549 if (debug_args_for_decl == NULL)
6550 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6551 tree_vec_map_eq, 0);
6552 h = ggc_alloc_tree_vec_map ();
6553 h->base.from = from;
6554 h->to = NULL;
6555 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6556 INSERT);
6557 *(struct tree_vec_map **) loc = h;
6558 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6559 return &h->to;
6560 }
6561
6562 /* Hashing of types so that we don't make duplicates.
6563 The entry point is `type_hash_canon'. */
6564
6565 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6566 with types in the TREE_VALUE slots), by adding the hash codes
6567 of the individual types. */
6568
6569 static unsigned int
6570 type_hash_list (const_tree list, hashval_t hashcode)
6571 {
6572 const_tree tail;
6573
6574 for (tail = list; tail; tail = TREE_CHAIN (tail))
6575 if (TREE_VALUE (tail) != error_mark_node)
6576 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6577 hashcode);
6578
6579 return hashcode;
6580 }
6581
6582 /* These are the hash table callback functions. */
6583
6584 /* Returns true iff the types are equivalent. */
6585
6586 static int
6587 type_hash_eq (const void *va, const void *vb)
6588 {
6589 const struct type_hash *const a = (const struct type_hash *) va,
6590 *const b = (const struct type_hash *) vb;
6591
6592 /* First test the things that are the same for all types. */
6593 if (a->hash != b->hash
6594 || TREE_CODE (a->type) != TREE_CODE (b->type)
6595 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6596 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6597 TYPE_ATTRIBUTES (b->type))
6598 || (TREE_CODE (a->type) != COMPLEX_TYPE
6599 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6600 return 0;
6601
6602 /* Be careful about comparing arrays before and after the element type
6603 has been completed; don't compare TYPE_ALIGN unless both types are
6604 complete. */
6605 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6606 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6607 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6608 return 0;
6609
6610 switch (TREE_CODE (a->type))
6611 {
6612 case VOID_TYPE:
6613 case COMPLEX_TYPE:
6614 case POINTER_TYPE:
6615 case REFERENCE_TYPE:
6616 case NULLPTR_TYPE:
6617 return 1;
6618
6619 case VECTOR_TYPE:
6620 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6621
6622 case ENUMERAL_TYPE:
6623 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6624 && !(TYPE_VALUES (a->type)
6625 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6626 && TYPE_VALUES (b->type)
6627 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6628 && type_list_equal (TYPE_VALUES (a->type),
6629 TYPE_VALUES (b->type))))
6630 return 0;
6631
6632 /* ... fall through ... */
6633
6634 case INTEGER_TYPE:
6635 case REAL_TYPE:
6636 case BOOLEAN_TYPE:
6637 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6638 return false;
6639 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6640 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6641 TYPE_MAX_VALUE (b->type)))
6642 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6643 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6644 TYPE_MIN_VALUE (b->type))));
6645
6646 case FIXED_POINT_TYPE:
6647 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6648
6649 case OFFSET_TYPE:
6650 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6651
6652 case METHOD_TYPE:
6653 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6654 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6655 || (TYPE_ARG_TYPES (a->type)
6656 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6657 && TYPE_ARG_TYPES (b->type)
6658 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6659 && type_list_equal (TYPE_ARG_TYPES (a->type),
6660 TYPE_ARG_TYPES (b->type)))))
6661 break;
6662 return 0;
6663 case ARRAY_TYPE:
6664 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6665
6666 case RECORD_TYPE:
6667 case UNION_TYPE:
6668 case QUAL_UNION_TYPE:
6669 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6670 || (TYPE_FIELDS (a->type)
6671 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6672 && TYPE_FIELDS (b->type)
6673 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6674 && type_list_equal (TYPE_FIELDS (a->type),
6675 TYPE_FIELDS (b->type))));
6676
6677 case FUNCTION_TYPE:
6678 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6679 || (TYPE_ARG_TYPES (a->type)
6680 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6681 && TYPE_ARG_TYPES (b->type)
6682 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6683 && type_list_equal (TYPE_ARG_TYPES (a->type),
6684 TYPE_ARG_TYPES (b->type))))
6685 break;
6686 return 0;
6687
6688 default:
6689 return 0;
6690 }
6691
6692 if (lang_hooks.types.type_hash_eq != NULL)
6693 return lang_hooks.types.type_hash_eq (a->type, b->type);
6694
6695 return 1;
6696 }
6697
6698 /* Return the cached hash value. */
6699
6700 static hashval_t
6701 type_hash_hash (const void *item)
6702 {
6703 return ((const struct type_hash *) item)->hash;
6704 }
6705
6706 /* Look in the type hash table for a type isomorphic to TYPE.
6707 If one is found, return it. Otherwise return 0. */
6708
6709 static tree
6710 type_hash_lookup (hashval_t hashcode, tree type)
6711 {
6712 struct type_hash *h, in;
6713
6714 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6715 must call that routine before comparing TYPE_ALIGNs. */
6716 layout_type (type);
6717
6718 in.hash = hashcode;
6719 in.type = type;
6720
6721 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6722 hashcode);
6723 if (h)
6724 return h->type;
6725 return NULL_TREE;
6726 }
6727
6728 /* Add an entry to the type-hash-table
6729 for a type TYPE whose hash code is HASHCODE. */
6730
6731 static void
6732 type_hash_add (hashval_t hashcode, tree type)
6733 {
6734 struct type_hash *h;
6735 void **loc;
6736
6737 h = ggc_alloc_type_hash ();
6738 h->hash = hashcode;
6739 h->type = type;
6740 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6741 *loc = (void *)h;
6742 }
6743
6744 /* Given TYPE, and HASHCODE its hash code, return the canonical
6745 object for an identical type if one already exists.
6746 Otherwise, return TYPE, and record it as the canonical object.
6747
6748 To use this function, first create a type of the sort you want.
6749 Then compute its hash code from the fields of the type that
6750 make it different from other similar types.
6751 Then call this function and use the value. */
6752
6753 tree
6754 type_hash_canon (unsigned int hashcode, tree type)
6755 {
6756 tree t1;
6757
6758 /* The hash table only contains main variants, so ensure that's what we're
6759 being passed. */
6760 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6761
6762 /* See if the type is in the hash table already. If so, return it.
6763 Otherwise, add the type. */
6764 t1 = type_hash_lookup (hashcode, type);
6765 if (t1 != 0)
6766 {
6767 if (GATHER_STATISTICS)
6768 {
6769 tree_code_counts[(int) TREE_CODE (type)]--;
6770 tree_node_counts[(int) t_kind]--;
6771 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6772 }
6773 return t1;
6774 }
6775 else
6776 {
6777 type_hash_add (hashcode, type);
6778 return type;
6779 }
6780 }
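
/* Illustrative sketch (editorial, not part of GCC) of the recipe in the
   comment above: build a candidate type, hash the fields that
   distinguish it, then let type_hash_canon either return an existing
   identical type or register this one.  The helper name is
   hypothetical, and using only the precision as hash input is purely
   for illustration.  */

static tree
example_canonical_uint_type (unsigned int precision)
{
  tree t = make_node (INTEGER_TYPE);
  hashval_t hashcode;

  TYPE_PRECISION (t) = precision;
  fixup_unsigned_type (t);

  hashcode = iterative_hash_object (precision, 0);
  return type_hash_canon (hashcode, t);
}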
6781
6782 /* See if the data pointed to by the type hash table is marked. We consider
6783 it marked if the type is marked or if a debug type number or symbol
6784 table entry has been made for the type. */
6785
6786 static int
6787 type_hash_marked_p (const void *p)
6788 {
6789 const_tree const type = ((const struct type_hash *) p)->type;
6790
6791 return ggc_marked_p (type);
6792 }
6793
6794 static void
6795 print_type_hash_statistics (void)
6796 {
6797 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6798 (long) htab_size (type_hash_table),
6799 (long) htab_elements (type_hash_table),
6800 htab_collisions (type_hash_table));
6801 }
6802
6803 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6804 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6805 by adding the hash codes of the individual attributes. */
6806
6807 static unsigned int
6808 attribute_hash_list (const_tree list, hashval_t hashcode)
6809 {
6810 const_tree tail;
6811
6812 for (tail = list; tail; tail = TREE_CHAIN (tail))
6813 /* ??? Do we want to add in TREE_VALUE too? */
6814 hashcode = iterative_hash_object
6815 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6816 return hashcode;
6817 }
6818
6819 /* Given two lists of attributes, return true if list L2 is
6820 equivalent to L1. */
6821
6822 int
6823 attribute_list_equal (const_tree l1, const_tree l2)
6824 {
6825 if (l1 == l2)
6826 return 1;
6827
6828 return attribute_list_contained (l1, l2)
6829 && attribute_list_contained (l2, l1);
6830 }
6831
6832 /* Given two lists of attributes, return true if list L2 is
6833 completely contained within L1. */
6834 /* ??? This would be faster if attribute names were stored in a canonicalized
6835 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6836 must be used to show these elements are equivalent (which they are). */
6837 /* ??? It's not clear that attributes with arguments will always be handled
6838 correctly. */
6839
6840 int
6841 attribute_list_contained (const_tree l1, const_tree l2)
6842 {
6843 const_tree t1, t2;
6844
6845 /* First check the obvious, maybe the lists are identical. */
6846 if (l1 == l2)
6847 return 1;
6848
6849 /* Maybe the lists are similar. */
6850 for (t1 = l1, t2 = l2;
6851 t1 != 0 && t2 != 0
6852 && get_attribute_name (t1) == get_attribute_name (t2)
6853 && TREE_VALUE (t1) == TREE_VALUE (t2);
6854 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6855 ;
6856
6857 /* Maybe the lists are equal. */
6858 if (t1 == 0 && t2 == 0)
6859 return 1;
6860
6861 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6862 {
6863 const_tree attr;
6864 /* This CONST_CAST is okay because lookup_attribute does not
6865 modify its argument and the return value is assigned to a
6866 const_tree. */
6867 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6868 CONST_CAST_TREE (l1));
6869 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6870 attr = lookup_ident_attribute (get_attribute_name (t2),
6871 TREE_CHAIN (attr)))
6872 ;
6873
6874 if (attr == NULL_TREE)
6875 return 0;
6876 }
6877
6878 return 1;
6879 }
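
/* Illustrative sketch (editorial, not part of GCC): containment is
   asymmetric.  With L1 = {noreturn, const} and L2 = {const}, L2 is
   contained in L1 but the two lists are not equal.  The helper name is
   hypothetical.  */

static void
example_attribute_containment (void)
{
  tree l2 = tree_cons (get_identifier ("const"), NULL_TREE, NULL_TREE);
  tree l1 = tree_cons (get_identifier ("noreturn"), NULL_TREE, l2);

  gcc_assert (attribute_list_contained (l1, l2));
  gcc_assert (!attribute_list_equal (l1, l2));
}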
6880
6881 /* Given two lists of types
6882 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6883 return 1 if the lists contain the same types in the same order.
6884 Also, the TREE_PURPOSEs must match. */
6885
6886 int
6887 type_list_equal (const_tree l1, const_tree l2)
6888 {
6889 const_tree t1, t2;
6890
6891 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6892 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6893 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6894 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6895 && (TREE_TYPE (TREE_PURPOSE (t1))
6896 == TREE_TYPE (TREE_PURPOSE (t2))))))
6897 return 0;
6898
6899 return t1 == t2;
6900 }
6901
6902 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6903 given by TYPE. If the argument list accepts variable arguments,
6904 then this function counts only the ordinary arguments. */
6905
6906 int
6907 type_num_arguments (const_tree type)
6908 {
6909 int i = 0;
6910 tree t;
6911
6912 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6913 /* If the function does not take a variable number of arguments,
6914 the last element in the list will have type `void'. */
6915 if (VOID_TYPE_P (TREE_VALUE (t)))
6916 break;
6917 else
6918 ++i;
6919
6920 return i;
6921 }
6922
6923 /* Nonzero if integer constants T1 and T2
6924 represent the same constant value. */
6925
6926 int
6927 tree_int_cst_equal (const_tree t1, const_tree t2)
6928 {
6929 if (t1 == t2)
6930 return 1;
6931
6932 if (t1 == 0 || t2 == 0)
6933 return 0;
6934
6935 if (TREE_CODE (t1) == INTEGER_CST
6936 && TREE_CODE (t2) == INTEGER_CST
6937 && wi::to_widest (t1) == wi::to_widest (t2))
6938 return 1;
6939
6940 return 0;
6941 }
6942
6943 /* Nonzero if integer constants T1 and T2 represent values that satisfy <.
6944 The precise way of comparison depends on their data type. */
6945
6946 int
6947 tree_int_cst_lt (const_tree t1, const_tree t2)
6948 {
6949 return INT_CST_LT (t1, t2);
6950 }
6951
6952 /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
6953
6954 int
6955 tree_int_cst_compare (const_tree t1, const_tree t2)
6956 {
6957 return wi::cmps (wi::to_widest (t1), wi::to_widest (t2));
6958 }
6959
6960 /* Return the most significant (sign) bit of T. */
6961
6962 int
6963 tree_int_cst_sign_bit (const_tree t)
6964 {
6965 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6966
6967 return wi::extract_uhwi (t, bitno, 1);
6968 }
6969
6970 /* Return an indication of the sign of the integer constant T.
6971 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6972 Note that -1 will never be returned if T's type is unsigned. */
6973
6974 int
6975 tree_int_cst_sgn (const_tree t)
6976 {
6977 if (wi::eq_p (t, 0))
6978 return 0;
6979 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6980 return 1;
6981 else if (wi::neg_p (t))
6982 return -1;
6983 else
6984 return 1;
6985 }
6986
6987 /* Return the minimum number of bits needed to represent VALUE in a
6988 signed or unsigned type; SGN says which. */
6989
6990 unsigned int
6991 tree_int_cst_min_precision (tree value, signop sgn)
6992 {
6993 /* If the value is negative, compute its negative minus 1. The latter
6994 adjustment is because the absolute value of the largest negative value
6995 is one larger than the largest positive value. This is equivalent to
6996 a bit-wise negation, so use that operation instead. */
6997
6998 if (tree_int_cst_sgn (value) < 0)
6999 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7000
7001 /* Return the number of bits needed, taking into account the fact
7002 that we need one more bit for a signed type than for an unsigned
7003 one. If VALUE is 0 or -1, the minimum precision is 1 no matter
7004 whether SGN is SIGNED or UNSIGNED. */
7005
7006 if (integer_zerop (value))
7007 return 1;
7008 else
7009 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7010 }
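
/* Illustrative sketch (editorial, not part of GCC) with a couple of
   data points for the function above: 100 needs 7 bits unsigned but 8
   bits signed, and -1 needs only 1 bit as a signed value.  The helper
   name is hypothetical.  */

static void
example_min_precision (void)
{
  tree hundred = build_int_cst (integer_type_node, 100);
  tree minus_one = build_int_cst (integer_type_node, -1);

  gcc_assert (tree_int_cst_min_precision (hundred, UNSIGNED) == 7);
  gcc_assert (tree_int_cst_min_precision (hundred, SIGNED) == 8);
  gcc_assert (tree_int_cst_min_precision (minus_one, SIGNED) == 1);
}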
7011
7012 /* Return truthvalue of whether T1 is the same tree structure as T2.
7013 Return 1 if they are the same.
7014 Return 0 if they are understandably different.
7015 Return -1 if either contains tree structure not understood by
7016 this function. */
7017
7018 int
7019 simple_cst_equal (const_tree t1, const_tree t2)
7020 {
7021 enum tree_code code1, code2;
7022 int cmp;
7023 int i;
7024
7025 if (t1 == t2)
7026 return 1;
7027 if (t1 == 0 || t2 == 0)
7028 return 0;
7029
7030 code1 = TREE_CODE (t1);
7031 code2 = TREE_CODE (t2);
7032
7033 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7034 {
7035 if (CONVERT_EXPR_CODE_P (code2)
7036 || code2 == NON_LVALUE_EXPR)
7037 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7038 else
7039 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7040 }
7041
7042 else if (CONVERT_EXPR_CODE_P (code2)
7043 || code2 == NON_LVALUE_EXPR)
7044 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7045
7046 if (code1 != code2)
7047 return 0;
7048
7049 switch (code1)
7050 {
7051 case INTEGER_CST:
7052 return wi::to_widest (t1) == wi::to_widest (t2);
7053
7054 case REAL_CST:
7055 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7056
7057 case FIXED_CST:
7058 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7059
7060 case STRING_CST:
7061 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7062 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7063 TREE_STRING_LENGTH (t1)));
7064
7065 case CONSTRUCTOR:
7066 {
7067 unsigned HOST_WIDE_INT idx;
7068 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7069 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7070
7071 if (vec_safe_length (v1) != vec_safe_length (v2))
7072 return false;
7073
7074 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7075 /* ??? Should we handle also fields here? */
7076 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7077 return false;
7078 return true;
7079 }
7080
7081 case SAVE_EXPR:
7082 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7083
7084 case CALL_EXPR:
7085 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7086 if (cmp <= 0)
7087 return cmp;
7088 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7089 return 0;
7090 {
7091 const_tree arg1, arg2;
7092 const_call_expr_arg_iterator iter1, iter2;
7093 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7094 arg2 = first_const_call_expr_arg (t2, &iter2);
7095 arg1 && arg2;
7096 arg1 = next_const_call_expr_arg (&iter1),
7097 arg2 = next_const_call_expr_arg (&iter2))
7098 {
7099 cmp = simple_cst_equal (arg1, arg2);
7100 if (cmp <= 0)
7101 return cmp;
7102 }
7103 return arg1 == arg2;
7104 }
7105
7106 case TARGET_EXPR:
7107 /* Special case: if either target is an unallocated VAR_DECL,
7108 it means that it's going to be unified with whatever the
7109 TARGET_EXPR is really supposed to initialize, so treat it
7110 as being equivalent to anything. */
7111 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7112 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7113 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7114 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7115 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7116 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7117 cmp = 1;
7118 else
7119 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7120
7121 if (cmp <= 0)
7122 return cmp;
7123
7124 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7125
7126 case WITH_CLEANUP_EXPR:
7127 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7128 if (cmp <= 0)
7129 return cmp;
7130
7131 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7132
7133 case COMPONENT_REF:
7134 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7135 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7136
7137 return 0;
7138
7139 case VAR_DECL:
7140 case PARM_DECL:
7141 case CONST_DECL:
7142 case FUNCTION_DECL:
7143 return 0;
7144
7145 default:
7146 break;
7147 }
7148
7149 /* This general rule works for most tree codes. All exceptions should be
7150 handled above. If this is a language-specific tree code, we can't
7151 trust what might be in the operand, so say we don't know
7152 the situation. */
7153 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7154 return -1;
7155
7156 switch (TREE_CODE_CLASS (code1))
7157 {
7158 case tcc_unary:
7159 case tcc_binary:
7160 case tcc_comparison:
7161 case tcc_expression:
7162 case tcc_reference:
7163 case tcc_statement:
7164 cmp = 1;
7165 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7166 {
7167 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7168 if (cmp <= 0)
7169 return cmp;
7170 }
7171
7172 return cmp;
7173
7174 default:
7175 return -1;
7176 }
7177 }
7178
7179 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7180 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7181 than U, respectively. */
7182
7183 int
7184 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7185 {
7186 if (tree_int_cst_sgn (t) < 0)
7187 return -1;
7188 else if (!cst_fits_uhwi_p (t))
7189 return 1;
7190 else if ((unsigned HOST_WIDE_INT) tree_to_hwi (t) == u)
7191 return 0;
7192 else if ((unsigned HOST_WIDE_INT) tree_to_hwi (t) < u)
7193 return -1;
7194 else
7195 return 1;
7196 }
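
/* Illustrative usage sketch (editorial, not part of GCC):
   compare_tree_int avoids building a tree node for the plain integer
   being compared against.  The helper name is hypothetical.  */

static bool
example_fits_in_byte (const_tree size)
{
  /* True iff 0 <= SIZE <= 255.  */
  return (tree_int_cst_sgn (size) >= 0
	  && compare_tree_int (size, 255) <= 0);
}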
7197
7198 /* Return true if SIZE represents a constant size that is in bounds of
7199 what the middle-end and the backend accept (covering not more than
7200 half of the address-space). */
7201
7202 bool
7203 valid_constant_size_p (const_tree size)
7204 {
7205 if (! tree_fits_uhwi_p (size)
7206 || TREE_OVERFLOW (size)
7207 || tree_int_cst_sign_bit (size) != 0)
7208 return false;
7209 return true;
7210 }
7211
7212 /* Return the precision of the type, or for a complex or vector type the
7213 precision of the type of its elements. */
7214
7215 unsigned int
7216 element_precision (const_tree type)
7217 {
7218 enum tree_code code = TREE_CODE (type);
7219 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7220 type = TREE_TYPE (type);
7221
7222 return TYPE_PRECISION (type);
7223 }
7224
7225 /* Return true if CODE represents an associative tree code. Otherwise
7226 return false. */
7227 bool
7228 associative_tree_code (enum tree_code code)
7229 {
7230 switch (code)
7231 {
7232 case BIT_IOR_EXPR:
7233 case BIT_AND_EXPR:
7234 case BIT_XOR_EXPR:
7235 case PLUS_EXPR:
7236 case MULT_EXPR:
7237 case MIN_EXPR:
7238 case MAX_EXPR:
7239 return true;
7240
7241 default:
7242 break;
7243 }
7244 return false;
7245 }
7246
7247 /* Return true if CODE represents a commutative tree code. Otherwise
7248 return false. */
7249 bool
7250 commutative_tree_code (enum tree_code code)
7251 {
7252 switch (code)
7253 {
7254 case PLUS_EXPR:
7255 case MULT_EXPR:
7256 case MULT_HIGHPART_EXPR:
7257 case MIN_EXPR:
7258 case MAX_EXPR:
7259 case BIT_IOR_EXPR:
7260 case BIT_XOR_EXPR:
7261 case BIT_AND_EXPR:
7262 case NE_EXPR:
7263 case EQ_EXPR:
7264 case UNORDERED_EXPR:
7265 case ORDERED_EXPR:
7266 case UNEQ_EXPR:
7267 case LTGT_EXPR:
7268 case TRUTH_AND_EXPR:
7269 case TRUTH_XOR_EXPR:
7270 case TRUTH_OR_EXPR:
7271 case WIDEN_MULT_EXPR:
7272 case VEC_WIDEN_MULT_HI_EXPR:
7273 case VEC_WIDEN_MULT_LO_EXPR:
7274 case VEC_WIDEN_MULT_EVEN_EXPR:
7275 case VEC_WIDEN_MULT_ODD_EXPR:
7276 return true;
7277
7278 default:
7279 break;
7280 }
7281 return false;
7282 }
7283
7284 /* Return true if CODE represents a ternary tree code for which the
7285 first two operands are commutative. Otherwise return false. */
7286 bool
7287 commutative_ternary_tree_code (enum tree_code code)
7288 {
7289 switch (code)
7290 {
7291 case WIDEN_MULT_PLUS_EXPR:
7292 case WIDEN_MULT_MINUS_EXPR:
7293 return true;
7294
7295 default:
7296 break;
7297 }
7298 return false;
7299 }
7300
7301 /* Generate a hash value for an expression. This can be used iteratively
7302 by passing a previous result as the VAL argument.
7303
7304 This function is intended to produce the same hash for expressions which
7305 would compare equal using operand_equal_p. */
7306
7307 hashval_t
7308 iterative_hash_expr (const_tree t, hashval_t val)
7309 {
7310 int i;
7311 enum tree_code code;
7312 char tclass;
7313
7314 if (t == NULL_TREE)
7315 return iterative_hash_hashval_t (0, val);
7316
7317 code = TREE_CODE (t);
7318
7319 switch (code)
7320 {
7321 /* Alas, constants aren't shared, so we can't rely on pointer
7322 identity. */
7323 case INTEGER_CST:
7324 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7325 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7326 return val;
7327 case REAL_CST:
7328 {
7329 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7330
7331 return iterative_hash_hashval_t (val2, val);
7332 }
7333 case FIXED_CST:
7334 {
7335 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7336
7337 return iterative_hash_hashval_t (val2, val);
7338 }
7339 case STRING_CST:
7340 return iterative_hash (TREE_STRING_POINTER (t),
7341 TREE_STRING_LENGTH (t), val);
7342 case COMPLEX_CST:
7343 val = iterative_hash_expr (TREE_REALPART (t), val);
7344 return iterative_hash_expr (TREE_IMAGPART (t), val);
7345 case VECTOR_CST:
7346 {
7347 unsigned i;
7348 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7349 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7350 return val;
7351 }
7352 case SSA_NAME:
7353 /* We can just compare by pointer. */
7354 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7355 case PLACEHOLDER_EXPR:
7356 /* The node itself doesn't matter. */
7357 return val;
7358 case TREE_LIST:
7359 /* A list of expressions, for a CALL_EXPR or as the elements of a
7360 VECTOR_CST. */
7361 for (; t; t = TREE_CHAIN (t))
7362 val = iterative_hash_expr (TREE_VALUE (t), val);
7363 return val;
7364 case CONSTRUCTOR:
7365 {
7366 unsigned HOST_WIDE_INT idx;
7367 tree field, value;
7368 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7369 {
7370 val = iterative_hash_expr (field, val);
7371 val = iterative_hash_expr (value, val);
7372 }
7373 return val;
7374 }
7375 case FUNCTION_DECL:
7376 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7377 Otherwise nodes that compare equal according to operand_equal_p might
7378 get different hash codes. However, don't do this for machine specific
7379 or front end builtins, since the function code is overloaded in those
7380 cases. */
7381 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7382 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7383 {
7384 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7385 code = TREE_CODE (t);
7386 }
7387 /* FALL THROUGH */
7388 default:
7389 tclass = TREE_CODE_CLASS (code);
7390
7391 if (tclass == tcc_declaration)
7392 {
7393 /* DECLs have a unique ID. */
7394 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7395 }
7396 else
7397 {
7398 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7399
7400 val = iterative_hash_object (code, val);
7401
7402 /* Don't hash the type, that can lead to having nodes which
7403 compare equal according to operand_equal_p, but which
7404 have different hash codes. */
7405 if (CONVERT_EXPR_CODE_P (code)
7406 || code == NON_LVALUE_EXPR)
7407 {
7408 /* Make sure to include signedness in the hash computation. */
7409 val += TYPE_UNSIGNED (TREE_TYPE (t));
7410 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7411 }
7412
7413 else if (commutative_tree_code (code))
7414 {
7415 /* It's a commutative expression. We want it to hash the same
7416 regardless of operand order. We do this by first hashing both
7417 operands and then combining the two hashes in a canonical
7418 order. */
7419 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7420 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7421 hashval_t tem;
7422
7423 if (one > two)
7424 tem = one, one = two, two = tem;
7425
7426 val = iterative_hash_hashval_t (one, val);
7427 val = iterative_hash_hashval_t (two, val);
7428 }
7429 else
7430 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7431 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7432 }
7433 return val;
7434 }
7435 }
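
/* Illustrative sketch (editorial, not part of GCC): because of the
   commutative-operand handling above, A + B and B + A hash to the same
   value, matching operand_equal_p's view that they are equal.  The
   helper name is hypothetical; A and B are assumed to have the same
   type.  */

static void
example_commutative_hash (tree a, tree b)
{
  tree t1 = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
  tree t2 = build2 (PLUS_EXPR, TREE_TYPE (a), b, a);

  gcc_assert (iterative_hash_expr (t1, 0) == iterative_hash_expr (t2, 0));
}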
7436
7437 /* Constructors for pointer, array and function types.
7438 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7439 constructed by language-dependent code, not here.) */
7440
7441 /* Construct, lay out and return the type of pointers to TO_TYPE with
7442 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7443 reference all of memory. If such a type has already been
7444 constructed, reuse it. */
7445
7446 tree
7447 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7448 bool can_alias_all)
7449 {
7450 tree t;
7451
7452 if (to_type == error_mark_node)
7453 return error_mark_node;
7454
7455 /* If the pointed-to type has the may_alias attribute set, force
7456 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7457 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7458 can_alias_all = true;
7459
7460 /* In some cases, languages will have things that aren't a POINTER_TYPE
7461 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7462 In that case, return that type without regard to the rest of our
7463 operands.
7464
7465 ??? This is a kludge, but consistent with the way this function has
7466 always operated and there doesn't seem to be a good way to avoid this
7467 at the moment. */
7468 if (TYPE_POINTER_TO (to_type) != 0
7469 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7470 return TYPE_POINTER_TO (to_type);
7471
7472 /* First, if we already have a type for pointers to TO_TYPE and it's
7473 the proper mode, use it. */
7474 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7475 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7476 return t;
7477
7478 t = make_node (POINTER_TYPE);
7479
7480 TREE_TYPE (t) = to_type;
7481 SET_TYPE_MODE (t, mode);
7482 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7483 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7484 TYPE_POINTER_TO (to_type) = t;
7485
7486 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7487 SET_TYPE_STRUCTURAL_EQUALITY (t);
7488 else if (TYPE_CANONICAL (to_type) != to_type)
7489 TYPE_CANONICAL (t)
7490 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7491 mode, can_alias_all);
7492
7493 /* Lay out the type. This function has many callers that are concerned
7494 with expression-construction, and this simplifies them all. */
7495 layout_type (t);
7496
7497 return t;
7498 }
7499
7500 /* By default build pointers in ptr_mode. */
7501
7502 tree
7503 build_pointer_type (tree to_type)
7504 {
7505 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7506 : TYPE_ADDR_SPACE (to_type);
7507 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7508 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7509 }
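
/* Illustrative sketch (editorial, not part of GCC): pointer types are
   cached on the pointed-to type, so asking twice for "int *" yields the
   same node.  The helper name is hypothetical.  */

static void
example_pointer_type_sharing (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);

  gcc_assert (p1 == p2);
}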
7510
7511 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7512
7513 tree
7514 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7515 bool can_alias_all)
7516 {
7517 tree t;
7518
7519 if (to_type == error_mark_node)
7520 return error_mark_node;
7521
7522 /* If the pointed-to type has the may_alias attribute set, force
7523 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7524 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7525 can_alias_all = true;
7526
7527 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7528 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7529 In that case, return that type without regard to the rest of our
7530 operands.
7531
7532 ??? This is a kludge, but consistent with the way this function has
7533 always operated and there doesn't seem to be a good way to avoid this
7534 at the moment. */
7535 if (TYPE_REFERENCE_TO (to_type) != 0
7536 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7537 return TYPE_REFERENCE_TO (to_type);
7538
7539 /* First, if we already have a type for pointers to TO_TYPE and it's
7540 the proper mode, use it. */
7541 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7542 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7543 return t;
7544
7545 t = make_node (REFERENCE_TYPE);
7546
7547 TREE_TYPE (t) = to_type;
7548 SET_TYPE_MODE (t, mode);
7549 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7550 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7551 TYPE_REFERENCE_TO (to_type) = t;
7552
7553 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7554 SET_TYPE_STRUCTURAL_EQUALITY (t);
7555 else if (TYPE_CANONICAL (to_type) != to_type)
7556 TYPE_CANONICAL (t)
7557 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7558 mode, can_alias_all);
7559
7560 layout_type (t);
7561
7562 return t;
7563 }
7564
7565
7566 /* Build the node for the type of references-to-TO_TYPE by default
7567 in ptr_mode. */
7568
7569 tree
7570 build_reference_type (tree to_type)
7571 {
7572 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7573 : TYPE_ADDR_SPACE (to_type);
7574 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7575 return build_reference_type_for_mode (to_type, pointer_mode, false);
7576 }
7577
7578 #define MAX_INT_CACHED_PREC \
7579 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7580 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7581
7582 /* Builds a signed or unsigned integer type of precision PRECISION.
7583 Used for C bitfields whose precision does not match that of
7584 built-in target types. */
7585 tree
7586 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7587 int unsignedp)
7588 {
7589 tree itype, ret;
7590
7591 if (unsignedp)
7592 unsignedp = MAX_INT_CACHED_PREC + 1;
7593
7594 if (precision <= MAX_INT_CACHED_PREC)
7595 {
7596 itype = nonstandard_integer_type_cache[precision + unsignedp];
7597 if (itype)
7598 return itype;
7599 }
7600
7601 itype = make_node (INTEGER_TYPE);
7602 TYPE_PRECISION (itype) = precision;
7603
7604 if (unsignedp)
7605 fixup_unsigned_type (itype);
7606 else
7607 fixup_signed_type (itype);
7608
7609 ret = itype;
7610 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7611 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7612 if (precision <= MAX_INT_CACHED_PREC)
7613 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7614
7615 return ret;
7616 }
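
/* Illustrative usage sketch (editorial, not part of GCC): the type a C
   front end might use for a bit-field declared "unsigned int x : 24",
   i.e. a 24-bit unsigned integer type.  The helper name is
   hypothetical.  */

static tree
example_bitfield_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}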
7617
7618 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7619 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7620 is true, reuse such a type that has already been constructed. */
7621
7622 static tree
7623 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7624 {
7625 tree itype = make_node (INTEGER_TYPE);
7626 hashval_t hashcode = 0;
7627
7628 TREE_TYPE (itype) = type;
7629
7630 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7631 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7632
7633 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7634 SET_TYPE_MODE (itype, TYPE_MODE (type));
7635 TYPE_SIZE (itype) = TYPE_SIZE (type);
7636 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7637 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7638 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7639
7640 if (!shared)
7641 return itype;
7642
7643 if ((TYPE_MIN_VALUE (itype)
7644 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7645 || (TYPE_MAX_VALUE (itype)
7646 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7647 {
7648 /* Since we cannot reliably merge this type, we need to compare it using
7649 structural equality checks. */
7650 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7651 return itype;
7652 }
7653
7654 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7655 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7656 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7657 itype = type_hash_canon (hashcode, itype);
7658
7659 return itype;
7660 }
7661
7662 /* Wrapper around build_range_type_1 with SHARED set to true. */
7663
7664 tree
7665 build_range_type (tree type, tree lowval, tree highval)
7666 {
7667 return build_range_type_1 (type, lowval, highval, true);
7668 }
7669
7670 /* Wrapper around build_range_type_1 with SHARED set to false. */
7671
7672 tree
7673 build_nonshared_range_type (tree type, tree lowval, tree highval)
7674 {
7675 return build_range_type_1 (type, lowval, highval, false);
7676 }
7677
7678 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7679 MAXVAL should be the maximum value in the domain
7680 (one less than the length of the array).
7681
7682 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7683 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7684 The limit exists because the result is a signed type and we don't handle
7685 sizes that use more than one HOST_WIDE_INT. */
7686
7687 tree
7688 build_index_type (tree maxval)
7689 {
7690 return build_range_type (sizetype, size_zero_node, maxval);
7691 }
7692
7693 /* Return true if the debug information for TYPE, a subtype, should be emitted
7694 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7695 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7696 debug info and doesn't reflect the source code. */
7697
7698 bool
7699 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7700 {
7701 tree base_type = TREE_TYPE (type), low, high;
7702
7703 /* Subrange types have a base type which is an integral type. */
7704 if (!INTEGRAL_TYPE_P (base_type))
7705 return false;
7706
7707 /* Get the real bounds of the subtype. */
7708 if (lang_hooks.types.get_subrange_bounds)
7709 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7710 else
7711 {
7712 low = TYPE_MIN_VALUE (type);
7713 high = TYPE_MAX_VALUE (type);
7714 }
7715
7716 /* If the type and its base type have the same representation and the same
7717 name, then the type is not a subrange but a copy of the base type. */
7718 if ((TREE_CODE (base_type) == INTEGER_TYPE
7719 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7720 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7721 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7722 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
7723 {
7724 tree type_name = TYPE_NAME (type);
7725 tree base_type_name = TYPE_NAME (base_type);
7726
7727 if (type_name && TREE_CODE (type_name) == TYPE_DECL)
7728 type_name = DECL_NAME (type_name);
7729
7730 if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
7731 base_type_name = DECL_NAME (base_type_name);
7732
7733 if (type_name == base_type_name)
7734 return false;
7735 }
7736
7737 if (lowval)
7738 *lowval = low;
7739 if (highval)
7740 *highval = high;
7741 return true;
7742 }
7743
7744 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7745 and number of elements specified by the range of values of INDEX_TYPE.
7746 If SHARED is true, reuse such a type that has already been constructed. */
7747
7748 static tree
7749 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7750 {
7751 tree t;
7752
7753 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7754 {
7755 error ("arrays of functions are not meaningful");
7756 elt_type = integer_type_node;
7757 }
7758
7759 t = make_node (ARRAY_TYPE);
7760 TREE_TYPE (t) = elt_type;
7761 TYPE_DOMAIN (t) = index_type;
7762 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7763 layout_type (t);
7764
7765 /* If the element type is incomplete at this point we get marked for
7766 structural equality. Do not record these types in the canonical
7767 type hashtable. */
7768 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7769 return t;
7770
7771 if (shared)
7772 {
7773 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7774 if (index_type)
7775 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7776 t = type_hash_canon (hashcode, t);
7777 }
7778
7779 if (TYPE_CANONICAL (t) == t)
7780 {
7781 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7782 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7783 SET_TYPE_STRUCTURAL_EQUALITY (t);
7784 else if (TYPE_CANONICAL (elt_type) != elt_type
7785 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7786 TYPE_CANONICAL (t)
7787 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7788 index_type
7789 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7790 shared);
7791 }
7792
7793 return t;
7794 }
7795
7796 /* Wrapper around build_array_type_1 with SHARED set to true. */
7797
7798 tree
7799 build_array_type (tree elt_type, tree index_type)
7800 {
7801 return build_array_type_1 (elt_type, index_type, true);
7802 }
7803
7804 /* Wrapper around build_array_type_1 with SHARED set to false. */
7805
7806 tree
7807 build_nonshared_array_type (tree elt_type, tree index_type)
7808 {
7809 return build_array_type_1 (elt_type, index_type, false);
7810 }
7811
7812 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7813 sizetype. */
7814
7815 tree
7816 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7817 {
7818 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7819 }
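
/* Illustrative example (not part of the original sources): for a ten-element
   array of 'int', the following two calls should yield the same type, since
   build_array_type_nelts simply builds the 0 .. NELTS-1 index type itself:

     tree a1 = build_array_type_nelts (integer_type_node, 10);
     tree a2 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));  */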
7820
7821 /* Recursively examines the array elements of TYPE, until a non-array
7822 element type is found. */
7823
7824 tree
7825 strip_array_types (tree type)
7826 {
7827 while (TREE_CODE (type) == ARRAY_TYPE)
7828 type = TREE_TYPE (type);
7829
7830 return type;
7831 }
7832
7833 /* Computes the canonical argument types from the argument type list
7834 ARGTYPES.
7835
7836 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7837 on entry to this function, or if any of the ARGTYPES are
7838 structural.
7839
7840 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7841 true on entry to this function, or if any of the ARGTYPES are
7842 non-canonical.
7843
7844 Returns a canonical argument list, which may be ARGTYPES when the
7845 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7846 true) or would not differ from ARGTYPES. */
7847
7848 static tree
7849 maybe_canonicalize_argtypes (tree argtypes,
7850 bool *any_structural_p,
7851 bool *any_noncanonical_p)
7852 {
7853 tree arg;
7854 bool any_noncanonical_argtypes_p = false;
7855
7856 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7857 {
7858 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7859 /* Fail gracefully by stating that the type is structural. */
7860 *any_structural_p = true;
7861 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7862 *any_structural_p = true;
7863 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7864 || TREE_PURPOSE (arg))
7865 /* If the argument has a default argument, we consider it
7866 non-canonical even though the type itself is canonical.
7867 That way, different variants of function and method types
7868 with default arguments will all point to the variant with
7869 no defaults as their canonical type. */
7870 any_noncanonical_argtypes_p = true;
7871 }
7872
7873 if (*any_structural_p)
7874 return argtypes;
7875
7876 if (any_noncanonical_argtypes_p)
7877 {
7878 /* Build the canonical list of argument types. */
7879 tree canon_argtypes = NULL_TREE;
7880 bool is_void = false;
7881
7882 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7883 {
7884 if (arg == void_list_node)
7885 is_void = true;
7886 else
7887 canon_argtypes = tree_cons (NULL_TREE,
7888 TYPE_CANONICAL (TREE_VALUE (arg)),
7889 canon_argtypes);
7890 }
7891
7892 canon_argtypes = nreverse (canon_argtypes);
7893 if (is_void)
7894 canon_argtypes = chainon (canon_argtypes, void_list_node);
7895
7896 /* There is a non-canonical type. */
7897 *any_noncanonical_p = true;
7898 return canon_argtypes;
7899 }
7900
7901 /* The canonical argument types are the same as ARGTYPES. */
7902 return argtypes;
7903 }
7904
7905 /* Construct, lay out and return
7906 the type of functions returning type VALUE_TYPE
7907 given arguments of types ARG_TYPES.
7908 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7909 are data type nodes for the arguments of the function.
7910 If such a type has already been constructed, reuse it. */
7911
7912 tree
7913 build_function_type (tree value_type, tree arg_types)
7914 {
7915 tree t;
7916 hashval_t hashcode = 0;
7917 bool any_structural_p, any_noncanonical_p;
7918 tree canon_argtypes;
7919
7920 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7921 {
7922 error ("function return type cannot be function");
7923 value_type = integer_type_node;
7924 }
7925
7926 /* Make a node of the sort we want. */
7927 t = make_node (FUNCTION_TYPE);
7928 TREE_TYPE (t) = value_type;
7929 TYPE_ARG_TYPES (t) = arg_types;
7930
7931 /* If we already have such a type, use the old one. */
7932 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
7933 hashcode = type_hash_list (arg_types, hashcode);
7934 t = type_hash_canon (hashcode, t);
7935
7936 /* Set up the canonical type. */
7937 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7938 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7939 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7940 &any_structural_p,
7941 &any_noncanonical_p);
7942 if (any_structural_p)
7943 SET_TYPE_STRUCTURAL_EQUALITY (t);
7944 else if (any_noncanonical_p)
7945 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7946 canon_argtypes);
7947
7948 if (!COMPLETE_TYPE_P (t))
7949 layout_type (t);
7950 return t;
7951 }
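
/* Illustrative example (not part of the original sources): ARG_TYPES is a
   TREE_LIST chain terminated by void_list_node for a non-varargs function.
   The type of 'int f (int, double)' could therefore be built by hand as:

     tree args = tree_cons (NULL_TREE, integer_type_node,
                            tree_cons (NULL_TREE, double_type_node,
                                       void_list_node));
     tree fntype = build_function_type (integer_type_node, args);

   In practice the build_function_type_list wrappers below are easier.  */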
7952
7953 /* Build a function type. The RETURN_TYPE is the type returned by the
7954 function. If VAARGS is set, no void_type_node is appended to the
7955 list. ARGP must always be terminated by a NULL_TREE. */
7956
7957 static tree
7958 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7959 {
7960 tree t, args, last;
7961
7962 t = va_arg (argp, tree);
7963 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7964 args = tree_cons (NULL_TREE, t, args);
7965
7966 if (vaargs)
7967 {
7968 last = args;
7969 if (args != NULL_TREE)
7970 args = nreverse (args);
7971 gcc_assert (last != void_list_node);
7972 }
7973 else if (args == NULL_TREE)
7974 args = void_list_node;
7975 else
7976 {
7977 last = args;
7978 args = nreverse (args);
7979 TREE_CHAIN (last) = void_list_node;
7980 }
7981 args = build_function_type (return_type, args);
7982
7983 return args;
7984 }
7985
7986 /* Build a function type. The RETURN_TYPE is the type returned by the
7987 function. If additional arguments are provided, they are
7988 additional argument types. The list of argument types must always
7989 be terminated by NULL_TREE. */
7990
7991 tree
7992 build_function_type_list (tree return_type, ...)
7993 {
7994 tree args;
7995 va_list p;
7996
7997 va_start (p, return_type);
7998 args = build_function_type_list_1 (false, return_type, p);
7999 va_end (p);
8000 return args;
8001 }
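
/* Illustrative example (not part of the original sources): the type of
   'double f (int, long)' could be obtained with

     tree fntype = build_function_type_list (double_type_node,
                                             integer_type_node,
                                             long_integer_type_node,
                                             NULL_TREE);  */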
8002
8003 /* Build a variable argument function type. The RETURN_TYPE is the
8004 type returned by the function. If additional arguments are provided,
8005 they are additional argument types. The list of argument types must
8006 always be terminated by NULL_TREE. */
8007
8008 tree
8009 build_varargs_function_type_list (tree return_type, ...)
8010 {
8011 tree args;
8012 va_list p;
8013
8014 va_start (p, return_type);
8015 args = build_function_type_list_1 (true, return_type, p);
8016 va_end (p);
8017
8018 return args;
8019 }
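
/* Illustrative example (not part of the original sources): the type of a
   varargs function 'int f (void *, ...)' could be obtained with

     tree fntype = build_varargs_function_type_list (integer_type_node,
                                                     ptr_type_node,
                                                     NULL_TREE);  */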
8020
8021 /* Build a function type. RETURN_TYPE is the type returned by the
8022 function; VAARGS indicates whether the function takes varargs. The
8023 function takes N named arguments, the types of which are provided in
8024 ARG_TYPES. */
8025
8026 static tree
8027 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8028 tree *arg_types)
8029 {
8030 int i;
8031 tree t = vaargs ? NULL_TREE : void_list_node;
8032
8033 for (i = n - 1; i >= 0; i--)
8034 t = tree_cons (NULL_TREE, arg_types[i], t);
8035
8036 return build_function_type (return_type, t);
8037 }
8038
8039 /* Build a function type. RETURN_TYPE is the type returned by the
8040 function. The function takes N named arguments, the types of which
8041 are provided in ARG_TYPES. */
8042
8043 tree
8044 build_function_type_array (tree return_type, int n, tree *arg_types)
8045 {
8046 return build_function_type_array_1 (false, return_type, n, arg_types);
8047 }
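
/* Illustrative example (not part of the original sources): the array form is
   convenient when the argument types are already held in a C array, e.g.

     tree argtv[2] = { integer_type_node, double_type_node };
     tree fntype = build_function_type_array (void_type_node, 2, argtv);  */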
8048
8049 /* Build a variable argument function type. RETURN_TYPE is the type
8050 returned by the function. The function takes N named arguments, the
8051 types of which are provided in ARG_TYPES. */
8052
8053 tree
8054 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8055 {
8056 return build_function_type_array_1 (true, return_type, n, arg_types);
8057 }
8058
8059 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8060 and ARGTYPES (a TREE_LIST) are the return type and argument types
8061 for the method. An implicit additional parameter (of type
8062 pointer-to-BASETYPE) is added to the ARGTYPES. */
8063
8064 tree
8065 build_method_type_directly (tree basetype,
8066 tree rettype,
8067 tree argtypes)
8068 {
8069 tree t;
8070 tree ptype;
8071 int hashcode = 0;
8072 bool any_structural_p, any_noncanonical_p;
8073 tree canon_argtypes;
8074
8075 /* Make a node of the sort we want. */
8076 t = make_node (METHOD_TYPE);
8077
8078 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8079 TREE_TYPE (t) = rettype;
8080 ptype = build_pointer_type (basetype);
8081
8082 /* The actual arglist for this function includes a "hidden" argument
8083 which is "this". Put it into the list of argument types. */
8084 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8085 TYPE_ARG_TYPES (t) = argtypes;
8086
8087 /* If we already have such a type, use the old one. */
8088 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8089 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8090 hashcode = type_hash_list (argtypes, hashcode);
8091 t = type_hash_canon (hashcode, t);
8092
8093 /* Set up the canonical type. */
8094 any_structural_p
8095 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8096 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8097 any_noncanonical_p
8098 = (TYPE_CANONICAL (basetype) != basetype
8099 || TYPE_CANONICAL (rettype) != rettype);
8100 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8101 &any_structural_p,
8102 &any_noncanonical_p);
8103 if (any_structural_p)
8104 SET_TYPE_STRUCTURAL_EQUALITY (t);
8105 else if (any_noncanonical_p)
8106 TYPE_CANONICAL (t)
8107 = build_method_type_directly (TYPE_CANONICAL (basetype),
8108 TYPE_CANONICAL (rettype),
8109 canon_argtypes);
8110 if (!COMPLETE_TYPE_P (t))
8111 layout_type (t);
8112
8113 return t;
8114 }
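
/* Illustrative example (not part of the original sources; 's_type' stands
   for some already-built RECORD_TYPE).  The type of a method
   'int S::f (double)' could be built as

     tree argtypes = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_method_type_directly (s_type, integer_type_node,
                                              argtypes);

   build_method_type_directly prepends the implicit 'this' argument of type
   pointer-to-S itself.  */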
8115
8116 /* Construct, lay out and return the type of methods belonging to class
8117 BASETYPE and whose arguments and values are described by TYPE.
8118 If that type exists already, reuse it.
8119 TYPE must be a FUNCTION_TYPE node. */
8120
8121 tree
8122 build_method_type (tree basetype, tree type)
8123 {
8124 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8125
8126 return build_method_type_directly (basetype,
8127 TREE_TYPE (type),
8128 TYPE_ARG_TYPES (type));
8129 }
8130
8131 /* Construct, lay out and return the type of offsets to a value
8132 of type TYPE, within an object of type BASETYPE.
8133 If a suitable offset type exists already, reuse it. */
8134
8135 tree
8136 build_offset_type (tree basetype, tree type)
8137 {
8138 tree t;
8139 hashval_t hashcode = 0;
8140
8141 /* Make a node of the sort we want. */
8142 t = make_node (OFFSET_TYPE);
8143
8144 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8145 TREE_TYPE (t) = type;
8146
8147 /* If we already have such a type, use the old one. */
8148 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8149 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8150 t = type_hash_canon (hashcode, t);
8151
8152 if (!COMPLETE_TYPE_P (t))
8153 layout_type (t);
8154
8155 if (TYPE_CANONICAL (t) == t)
8156 {
8157 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8158 || TYPE_STRUCTURAL_EQUALITY_P (type))
8159 SET_TYPE_STRUCTURAL_EQUALITY (t);
8160 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8161 || TYPE_CANONICAL (type) != type)
8162 TYPE_CANONICAL (t)
8163 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8164 TYPE_CANONICAL (type));
8165 }
8166
8167 return t;
8168 }
8169
8170 /* Create a complex type whose components are COMPONENT_TYPE. */
8171
8172 tree
8173 build_complex_type (tree component_type)
8174 {
8175 tree t;
8176 hashval_t hashcode;
8177
8178 gcc_assert (INTEGRAL_TYPE_P (component_type)
8179 || SCALAR_FLOAT_TYPE_P (component_type)
8180 || FIXED_POINT_TYPE_P (component_type));
8181
8182 /* Make a node of the sort we want. */
8183 t = make_node (COMPLEX_TYPE);
8184
8185 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8186
8187 /* If we already have such a type, use the old one. */
8188 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8189 t = type_hash_canon (hashcode, t);
8190
8191 if (!COMPLETE_TYPE_P (t))
8192 layout_type (t);
8193
8194 if (TYPE_CANONICAL (t) == t)
8195 {
8196 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8197 SET_TYPE_STRUCTURAL_EQUALITY (t);
8198 else if (TYPE_CANONICAL (component_type) != component_type)
8199 TYPE_CANONICAL (t)
8200 = build_complex_type (TYPE_CANONICAL (component_type));
8201 }
8202
8203 /* We need to create a name, since complex is a fundamental type. */
8204 if (! TYPE_NAME (t))
8205 {
8206 const char *name;
8207 if (component_type == char_type_node)
8208 name = "complex char";
8209 else if (component_type == signed_char_type_node)
8210 name = "complex signed char";
8211 else if (component_type == unsigned_char_type_node)
8212 name = "complex unsigned char";
8213 else if (component_type == short_integer_type_node)
8214 name = "complex short int";
8215 else if (component_type == short_unsigned_type_node)
8216 name = "complex short unsigned int";
8217 else if (component_type == integer_type_node)
8218 name = "complex int";
8219 else if (component_type == unsigned_type_node)
8220 name = "complex unsigned int";
8221 else if (component_type == long_integer_type_node)
8222 name = "complex long int";
8223 else if (component_type == long_unsigned_type_node)
8224 name = "complex long unsigned int";
8225 else if (component_type == long_long_integer_type_node)
8226 name = "complex long long int";
8227 else if (component_type == long_long_unsigned_type_node)
8228 name = "complex long long unsigned int";
8229 else
8230 name = 0;
8231
8232 if (name != 0)
8233 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8234 get_identifier (name), t);
8235 }
8236
8237 return build_qualified_type (t, TYPE_QUALS (component_type));
8238 }
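
/* Illustrative example (not part of the original sources): the node for
   '_Complex double' is obtained simply as

     tree cd = build_complex_type (double_type_node);

   which is also how the standard complex_double_type_node is typically
   created.  */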
8239
8240 /* If TYPE is a real or complex floating-point type and the target
8241 does not directly support arithmetic on TYPE then return the wider
8242 type to be used for arithmetic on TYPE. Otherwise, return
8243 NULL_TREE. */
8244
8245 tree
8246 excess_precision_type (tree type)
8247 {
8248 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8249 {
8250 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8251 switch (TREE_CODE (type))
8252 {
8253 case REAL_TYPE:
8254 switch (flt_eval_method)
8255 {
8256 case 1:
8257 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8258 return double_type_node;
8259 break;
8260 case 2:
8261 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8262 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8263 return long_double_type_node;
8264 break;
8265 default:
8266 gcc_unreachable ();
8267 }
8268 break;
8269 case COMPLEX_TYPE:
8270 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8271 return NULL_TREE;
8272 switch (flt_eval_method)
8273 {
8274 case 1:
8275 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8276 return complex_double_type_node;
8277 break;
8278 case 2:
8279 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8280 || (TYPE_MODE (TREE_TYPE (type))
8281 == TYPE_MODE (double_type_node)))
8282 return complex_long_double_type_node;
8283 break;
8284 default:
8285 gcc_unreachable ();
8286 }
8287 break;
8288 default:
8289 break;
8290 }
8291 }
8292 return NULL_TREE;
8293 }
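
/* Illustrative example (not part of the original sources): on a target where
   TARGET_FLT_EVAL_METHOD is 2 (e.g. x87 excess precision) and
   -fexcess-precision=fast is not in effect, excess_precision_type
   (float_type_node) returns long_double_type_node, so 'float' arithmetic is
   carried out in 'long double'.  */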
8294 \f
8295 /* Return OP, stripped of any conversions to wider types as much as is safe.
8296 Converting the value back to OP's type makes a value equivalent to OP.
8297
8298 If FOR_TYPE is nonzero, we return a value which, if converted to
8299 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8300
8301 OP must have integer, real or enumeral type. Pointers are not allowed!
8302
8303 There are some cases where the obvious value we could return
8304 would regenerate to OP if converted to OP's type,
8305 but would not extend like OP to wider types.
8306 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8307 For example, if OP is (unsigned short)(signed char)-1,
8308 we avoid returning (signed char)-1 if FOR_TYPE is int,
8309 even though extending that to an unsigned short would regenerate OP,
8310 since the result of extending (signed char)-1 to (int)
8311 is different from (int) OP. */
8312
8313 tree
8314 get_unwidened (tree op, tree for_type)
8315 {
8316 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8317 tree type = TREE_TYPE (op);
8318 unsigned final_prec
8319 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8320 int uns
8321 = (for_type != 0 && for_type != type
8322 && final_prec > TYPE_PRECISION (type)
8323 && TYPE_UNSIGNED (type));
8324 tree win = op;
8325
8326 while (CONVERT_EXPR_P (op))
8327 {
8328 int bitschange;
8329
8330 /* TYPE_PRECISION on vector types has different meaning
8331 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8332 so avoid them here. */
8333 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8334 break;
8335
8336 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8337 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8338
8339 /* Truncations are many-one so cannot be removed,
8340 unless we are later going to truncate down even further. */
8341 if (bitschange < 0
8342 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8343 break;
8344
8345 /* See what's inside this conversion. If we decide to strip it,
8346 we will set WIN. */
8347 op = TREE_OPERAND (op, 0);
8348
8349 /* If we have not stripped any zero-extensions (uns is 0),
8350 we can strip any kind of extension.
8351 If we have previously stripped a zero-extension,
8352 only zero-extensions can safely be stripped.
8353 Any extension can be stripped if the bits it would produce
8354 are all going to be discarded later by truncating to FOR_TYPE. */
8355
8356 if (bitschange > 0)
8357 {
8358 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8359 win = op;
8360 /* TYPE_UNSIGNED says whether this is a zero-extension.
8361 Let's avoid computing it if it does not affect WIN
8362 and if UNS will not be needed again. */
8363 if ((uns
8364 || CONVERT_EXPR_P (op))
8365 && TYPE_UNSIGNED (TREE_TYPE (op)))
8366 {
8367 uns = 1;
8368 win = op;
8369 }
8370 }
8371 }
8372
8373 /* If we finally reach a constant see if it fits in for_type and
8374 in that case convert it. */
8375 if (for_type
8376 && TREE_CODE (win) == INTEGER_CST
8377 && TREE_TYPE (win) != for_type
8378 && int_fits_type_p (win, for_type))
8379 win = fold_convert (for_type, win);
8380
8381 return win;
8382 }
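
/* Illustrative example (not part of the original sources): if OP is
   (int) c, where 'c' is a variable of type 'signed char', then
   get_unwidened (op, NULL_TREE) returns 'c', because converting 'c' back to
   'int' regenerates OP.  The FOR_TYPE caveat in the comment above prevents
   analogous stripping when it would change how the value extends.  */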
8383 \f
8384 /* Return OP or a simpler expression for a narrower value
8385 which can be sign-extended or zero-extended to give back OP.
8386 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8387 or 0 if the value should be sign-extended. */
8388
8389 tree
8390 get_narrower (tree op, int *unsignedp_ptr)
8391 {
8392 int uns = 0;
8393 int first = 1;
8394 tree win = op;
8395 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8396
8397 while (TREE_CODE (op) == NOP_EXPR)
8398 {
8399 int bitschange
8400 = (TYPE_PRECISION (TREE_TYPE (op))
8401 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8402
8403 /* Truncations are many-one so cannot be removed. */
8404 if (bitschange < 0)
8405 break;
8406
8407 /* See what's inside this conversion. If we decide to strip it,
8408 we will set WIN. */
8409
8410 if (bitschange > 0)
8411 {
8412 op = TREE_OPERAND (op, 0);
8413 /* An extension: the outermost one can be stripped,
8414 but remember whether it is zero or sign extension. */
8415 if (first)
8416 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8417 /* Otherwise, if a sign extension has been stripped,
8418 only sign extensions can now be stripped;
8419 if a zero extension has been stripped, only zero-extensions. */
8420 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8421 break;
8422 first = 0;
8423 }
8424 else /* bitschange == 0 */
8425 {
8426 /* A change in nominal type can always be stripped, but we must
8427 preserve the unsignedness. */
8428 if (first)
8429 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8430 first = 0;
8431 op = TREE_OPERAND (op, 0);
8432 /* Keep trying to narrow, but don't assign op to win if it
8433 would turn an integral type into something else. */
8434 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8435 continue;
8436 }
8437
8438 win = op;
8439 }
8440
8441 if (TREE_CODE (op) == COMPONENT_REF
8442 /* Since type_for_size always gives an integer type. */
8443 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8444 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8445 /* Ensure field is laid out already. */
8446 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8447 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8448 {
8449 unsigned HOST_WIDE_INT innerprec
8450 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8451 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8452 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8453 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8454
8455 /* We can get this structure field in a narrower type that fits it,
8456 but the resulting extension to its nominal type (a fullword type)
8457 must satisfy the same conditions as for other extensions.
8458
8459 Do this only for fields that are aligned (not bit-fields),
8460 because when bit-field insns will be used there is no
8461 advantage in doing this. */
8462
8463 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8464 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8465 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8466 && type != 0)
8467 {
8468 if (first)
8469 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8470 win = fold_convert (type, op);
8471 }
8472 }
8473
8474 *unsignedp_ptr = uns;
8475 return win;
8476 }
8477 \f
8478 /* Returns true if integer constant C has a value that is permissible
8479 for type TYPE (an INTEGER_TYPE). */
8480
8481 bool
8482 int_fits_type_p (const_tree c, const_tree type)
8483 {
8484 tree type_low_bound, type_high_bound;
8485 bool ok_for_low_bound, ok_for_high_bound;
8486 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8487
8488 retry:
8489 type_low_bound = TYPE_MIN_VALUE (type);
8490 type_high_bound = TYPE_MAX_VALUE (type);
8491
8492 /* If at least one bound of the type is a constant integer, we can check
8493 ourselves and maybe make a decision. If no such decision is possible, but
8494 this type is a subtype, try checking against that. Otherwise, use
8495 fits_to_tree_p, which checks against the precision.
8496
8497 Compute the status for each possibly constant bound, and return if we see
8498 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8499 when the constant is known to satisfy the corresponding bound, and false
8500 when no decision could be made from that bound alone. */
8501
8502 /* Check if c >= type_low_bound. */
8503 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8504 {
8505 if (INT_CST_LT (c, type_low_bound))
8506 return false;
8507 ok_for_low_bound = true;
8508 }
8509 else
8510 ok_for_low_bound = false;
8511
8512 /* Check if c <= type_high_bound. */
8513 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8514 {
8515 if (INT_CST_LT (type_high_bound, c))
8516 return false;
8517 ok_for_high_bound = true;
8518 }
8519 else
8520 ok_for_high_bound = false;
8521
8522 /* If the constant fits both bounds, the result is known. */
8523 if (ok_for_low_bound && ok_for_high_bound)
8524 return true;
8525
8526 /* Perform some generic filtering which may allow making a decision
8527 even if the bounds are not constant. First, negative integers
8528 never fit in unsigned types. */
8529 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8530 return false;
8531
8532 /* Second, narrower types always fit in wider ones. */
8533 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8534 return true;
8535
8536 /* Third, unsigned integers with top bit set never fit signed types. */
8537 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED && wi::neg_p (c))
8538 return false;
8539
8540 /* If we haven't been able to decide at this point, there is nothing more we
8541 can check ourselves here. Look at the base type if we have one and it
8542 has the same precision. */
8543 if (TREE_CODE (type) == INTEGER_TYPE
8544 && TREE_TYPE (type) != 0
8545 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8546 {
8547 type = TREE_TYPE (type);
8548 goto retry;
8549 }
8550
8551 /* Or to fits_to_tree_p, if nothing else. */
8552 return wi::fits_to_tree_p (c, type);
8553 }
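
/* Illustrative example (not part of the original sources):

     int_fits_type_p (build_int_cst (integer_type_node, 100),
                      signed_char_type_node)   -- true, since 100 <= 127
     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      signed_char_type_node)   -- false, since 300 > 127  */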
8554
8555 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8556 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8557 represented (assuming two's-complement arithmetic) within the bit
8558 precision of the type are returned instead. */
8559
8560 void
8561 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8562 {
8563 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8564 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8565 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8566 else
8567 {
8568 if (TYPE_UNSIGNED (type))
8569 mpz_set_ui (min, 0);
8570 else
8571 {
8572 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8573 wi::to_mpz (mn, min, SIGNED);
8574 }
8575 }
8576
8577 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8578 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8579 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8580 else
8581 {
8582 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8583 wi::to_mpz (mn, max, TYPE_SIGN (type));
8584 }
8585 }
8586
8587 /* Return true if VAR is an automatic variable defined in function FN. */
8588
8589 bool
8590 auto_var_in_fn_p (const_tree var, const_tree fn)
8591 {
8592 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8593 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8594 || TREE_CODE (var) == PARM_DECL)
8595 && ! TREE_STATIC (var))
8596 || TREE_CODE (var) == LABEL_DECL
8597 || TREE_CODE (var) == RESULT_DECL));
8598 }
8599
8600 /* Subprogram of following function. Called by walk_tree.
8601
8602 Return *TP if it is an automatic variable or parameter of the
8603 function passed in as DATA. */
8604
8605 static tree
8606 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8607 {
8608 tree fn = (tree) data;
8609
8610 if (TYPE_P (*tp))
8611 *walk_subtrees = 0;
8612
8613 else if (DECL_P (*tp)
8614 && auto_var_in_fn_p (*tp, fn))
8615 return *tp;
8616
8617 return NULL_TREE;
8618 }
8619
8620 /* Returns true if T is, contains, or refers to a type with variable
8621 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8622 arguments, but not the return type. If FN is nonzero, only return
8623 true if a modifier of the type or position of FN is a variable or
8624 parameter inside FN.
8625
8626 This concept is more general than that of C99 'variably modified types':
8627 in C99, a struct type is never variably modified because a VLA may not
8628 appear as a structure member. However, in GNU C, code like:
8629
8630 struct S { int i[f()]; };
8631
8632 is valid, and other languages may define similar constructs. */
8633
8634 bool
8635 variably_modified_type_p (tree type, tree fn)
8636 {
8637 tree t;
8638
8639 /* Test if T is either variable (if FN is zero) or an expression containing
8640 a variable in FN. If TYPE isn't gimplified, return true also if
8641 gimplify_one_sizepos would gimplify the expression into a local
8642 variable. */
8643 #define RETURN_TRUE_IF_VAR(T) \
8644 do { tree _t = (T); \
8645 if (_t != NULL_TREE \
8646 && _t != error_mark_node \
8647 && TREE_CODE (_t) != INTEGER_CST \
8648 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8649 && (!fn \
8650 || (!TYPE_SIZES_GIMPLIFIED (type) \
8651 && !is_gimple_sizepos (_t)) \
8652 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8653 return true; } while (0)
8654
8655 if (type == error_mark_node)
8656 return false;
8657
8658 /* If TYPE itself has variable size, it is variably modified. */
8659 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8660 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8661
8662 switch (TREE_CODE (type))
8663 {
8664 case POINTER_TYPE:
8665 case REFERENCE_TYPE:
8666 case VECTOR_TYPE:
8667 if (variably_modified_type_p (TREE_TYPE (type), fn))
8668 return true;
8669 break;
8670
8671 case FUNCTION_TYPE:
8672 case METHOD_TYPE:
8673 /* If TYPE is a function type, it is variably modified if the
8674 return type is variably modified. */
8675 if (variably_modified_type_p (TREE_TYPE (type), fn))
8676 return true;
8677 break;
8678
8679 case INTEGER_TYPE:
8680 case REAL_TYPE:
8681 case FIXED_POINT_TYPE:
8682 case ENUMERAL_TYPE:
8683 case BOOLEAN_TYPE:
8684 /* Scalar types are variably modified if their end points
8685 aren't constant. */
8686 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8687 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8688 break;
8689
8690 case RECORD_TYPE:
8691 case UNION_TYPE:
8692 case QUAL_UNION_TYPE:
8693 /* We can't see if any of the fields are variably-modified by the
8694 definition we normally use, since that would produce infinite
8695 recursion via pointers. */
8696 /* This is variably modified if some field's type is. */
8697 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8698 if (TREE_CODE (t) == FIELD_DECL)
8699 {
8700 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8701 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8702 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8703
8704 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8705 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8706 }
8707 break;
8708
8709 case ARRAY_TYPE:
8710 /* Do not call ourselves to avoid infinite recursion. This is
8711 variably modified if the element type is. */
8712 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8713 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8714 break;
8715
8716 default:
8717 break;
8718 }
8719
8720 /* The current language may have other cases to check, but in general,
8721 all other types are not variably modified. */
8722 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8723
8724 #undef RETURN_TRUE_IF_VAR
8725 }
8726
8727 /* Given a DECL or TYPE, return the scope in which it was declared, or
8728 NULL_TREE if there is no containing scope. */
8729
8730 tree
8731 get_containing_scope (const_tree t)
8732 {
8733 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8734 }
8735
8736 /* Return the innermost context enclosing DECL that is
8737 a FUNCTION_DECL, or zero if none. */
8738
8739 tree
8740 decl_function_context (const_tree decl)
8741 {
8742 tree context;
8743
8744 if (TREE_CODE (decl) == ERROR_MARK)
8745 return 0;
8746
8747 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8748 where we look up the function at runtime. Such functions always take
8749 a first argument of type 'pointer to real context'.
8750
8751 C++ should really be fixed to use DECL_CONTEXT for the real context,
8752 and use something else for the "virtual context". */
8753 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8754 context
8755 = TYPE_MAIN_VARIANT
8756 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8757 else
8758 context = DECL_CONTEXT (decl);
8759
8760 while (context && TREE_CODE (context) != FUNCTION_DECL)
8761 {
8762 if (TREE_CODE (context) == BLOCK)
8763 context = BLOCK_SUPERCONTEXT (context);
8764 else
8765 context = get_containing_scope (context);
8766 }
8767
8768 return context;
8769 }
8770
8771 /* Return the innermost context enclosing DECL that is
8772 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8773 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8774
8775 tree
8776 decl_type_context (const_tree decl)
8777 {
8778 tree context = DECL_CONTEXT (decl);
8779
8780 while (context)
8781 switch (TREE_CODE (context))
8782 {
8783 case NAMESPACE_DECL:
8784 case TRANSLATION_UNIT_DECL:
8785 return NULL_TREE;
8786
8787 case RECORD_TYPE:
8788 case UNION_TYPE:
8789 case QUAL_UNION_TYPE:
8790 return context;
8791
8792 case TYPE_DECL:
8793 case FUNCTION_DECL:
8794 context = DECL_CONTEXT (context);
8795 break;
8796
8797 case BLOCK:
8798 context = BLOCK_SUPERCONTEXT (context);
8799 break;
8800
8801 default:
8802 gcc_unreachable ();
8803 }
8804
8805 return NULL_TREE;
8806 }
8807
8808 /* CALL is a CALL_EXPR. Return the declaration for the function
8809 called, or NULL_TREE if the called function cannot be
8810 determined. */
8811
8812 tree
8813 get_callee_fndecl (const_tree call)
8814 {
8815 tree addr;
8816
8817 if (call == error_mark_node)
8818 return error_mark_node;
8819
8820 /* It's invalid to call this function with anything but a
8821 CALL_EXPR. */
8822 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8823
8824 /* The first operand to the CALL is the address of the function
8825 called. */
8826 addr = CALL_EXPR_FN (call);
8827
8828 STRIP_NOPS (addr);
8829
8830 /* If this is a readonly function pointer, extract its initial value. */
8831 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8832 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8833 && DECL_INITIAL (addr))
8834 addr = DECL_INITIAL (addr);
8835
8836 /* If the address is just `&f' for some function `f', then we know
8837 that `f' is being called. */
8838 if (TREE_CODE (addr) == ADDR_EXPR
8839 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8840 return TREE_OPERAND (addr, 0);
8841
8842 /* We couldn't figure out what was being called. */
8843 return NULL_TREE;
8844 }
8845
8846 /* Print debugging information about tree nodes generated during the compile,
8847 and any language-specific information. */
8848
8849 void
8850 dump_tree_statistics (void)
8851 {
8852 if (GATHER_STATISTICS)
8853 {
8854 int i;
8855 int total_nodes, total_bytes;
8856 fprintf (stderr, "Kind Nodes Bytes\n");
8857 fprintf (stderr, "---------------------------------------\n");
8858 total_nodes = total_bytes = 0;
8859 for (i = 0; i < (int) all_kinds; i++)
8860 {
8861 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8862 tree_node_counts[i], tree_node_sizes[i]);
8863 total_nodes += tree_node_counts[i];
8864 total_bytes += tree_node_sizes[i];
8865 }
8866 fprintf (stderr, "---------------------------------------\n");
8867 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8868 fprintf (stderr, "---------------------------------------\n");
8869 fprintf (stderr, "Code Nodes\n");
8870 fprintf (stderr, "----------------------------\n");
8871 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8872 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8873 tree_code_counts[i]);
8874 fprintf (stderr, "----------------------------\n");
8875 ssanames_print_statistics ();
8876 phinodes_print_statistics ();
8877 }
8878 else
8879 fprintf (stderr, "(No per-node statistics)\n");
8880
8881 print_type_hash_statistics ();
8882 print_debug_expr_statistics ();
8883 print_value_expr_statistics ();
8884 lang_hooks.print_statistics ();
8885 }
8886 \f
8887 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8888
8889 /* Generate a crc32 of a byte. */
8890
8891 static unsigned
8892 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8893 {
8894 unsigned ix;
8895
8896 for (ix = bits; ix--; value <<= 1)
8897 {
8898 unsigned feedback;
8899
8900 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
8901 chksum <<= 1;
8902 chksum ^= feedback;
8903 }
8904 return chksum;
8905 }
8906
8907 /* Generate a crc32 of a 32-bit unsigned. */
8908
8909 unsigned
8910 crc32_unsigned (unsigned chksum, unsigned value)
8911 {
8912 return crc32_unsigned_bits (chksum, value, 32);
8913 }
8914
8915 /* Generate a crc32 of a byte. */
8916
8917 unsigned
8918 crc32_byte (unsigned chksum, char byte)
8919 {
8920 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
8921 }
8922
8923 /* Generate a crc32 of a string. */
8924
8925 unsigned
8926 crc32_string (unsigned chksum, const char *string)
8927 {
8928 do
8929 {
8930 chksum = crc32_byte (chksum, *string);
8931 }
8932 while (*string++);
8933 return chksum;
8934 }
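
/* Illustrative example (not part of the original sources): the crc32 helpers
   compose by threading the running checksum through successive calls, e.g.

     unsigned chk = crc32_string (0, main_input_filename);
     chk = crc32_unsigned (chk, some_extra_word);  /+ any 32-bit value +/

   get_file_function_name below uses crc32_string this way to make its
   generated symbol names less likely to collide.  */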
8935
8936 /* P is a string that will be used in a symbol. Mask out any characters
8937 that are not valid in that context. */
8938
8939 void
8940 clean_symbol_name (char *p)
8941 {
8942 for (; *p; p++)
8943 if (! (ISALNUM (*p)
8944 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8945 || *p == '$'
8946 #endif
8947 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8948 || *p == '.'
8949 #endif
8950 ))
8951 *p = '_';
8952 }
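
/* Illustrative example (not part of the original sources): given a writable
   buffer containing "my-file.c", clean_symbol_name rewrites it to
   "my_file.c" on targets where '.' is allowed in labels (NO_DOT_IN_LABEL not
   defined), and to "my_file_c" otherwise; '-' is never valid and always
   becomes '_'.  */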
8953
8954 /* Generate a name for a special-purpose function.
8955 The generated name may need to be unique across the whole link.
8956 Changes to this function may also require corresponding changes to
8957 xstrdup_mask_random.
8958 TYPE is some string to identify the purpose of this function to the
8959 linker or collect2; it must start with an uppercase letter,
8960 one of:
8961 I - for constructors
8962 D - for destructors
8963 N - for C++ anonymous namespaces
8964 F - for DWARF unwind frame information. */
8965
8966 tree
8967 get_file_function_name (const char *type)
8968 {
8969 char *buf;
8970 const char *p;
8971 char *q;
8972
8973 /* If we already have a name we know to be unique, just use that. */
8974 if (first_global_object_name)
8975 p = q = ASTRDUP (first_global_object_name);
8976 /* If the target is handling the constructors/destructors, they
8977 will be local to this file and the name is only necessary for
8978 debugging purposes.
8979 We also assign sub_I and sub_D suffixes to constructors called from
8980 the global static constructors. These are always local. */
8981 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8982 || (strncmp (type, "sub_", 4) == 0
8983 && (type[4] == 'I' || type[4] == 'D')))
8984 {
8985 const char *file = main_input_filename;
8986 if (! file)
8987 file = input_filename;
8988 /* Just use the file's basename, because the full pathname
8989 might be quite long. */
8990 p = q = ASTRDUP (lbasename (file));
8991 }
8992 else
8993 {
8994 /* Otherwise, the name must be unique across the entire link.
8995 We don't have anything that we know to be unique to this translation
8996 unit, so use what we do have and throw in some randomness. */
8997 unsigned len;
8998 const char *name = weak_global_object_name;
8999 const char *file = main_input_filename;
9000
9001 if (! name)
9002 name = "";
9003 if (! file)
9004 file = input_filename;
9005
9006 len = strlen (file);
9007 q = (char *) alloca (9 + 17 + len + 1);
9008 memcpy (q, file, len + 1);
9009
9010 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9011 crc32_string (0, name), get_random_seed (false));
9012
9013 p = q;
9014 }
9015
9016 clean_symbol_name (q);
9017 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9018 + strlen (type));
9019
9020 /* Set up the name of the file-level functions we may need.
9021 Use a global object (which is already required to be unique over
9022 the program) rather than the file name (which imposes extra
9023 constraints). */
9024 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9025
9026 return get_identifier (buf);
9027 }
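
/* Illustrative example (not part of the original sources): if
   first_global_object_name is "main" and TYPE is "I", the result is the
   identifier "_GLOBAL__I_main", following FILE_FUNCTION_FORMAT above.  */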
9028 \f
9029 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9030
9031 /* Complain that the tree code of NODE does not match the expected 0
9032 terminated list of trailing codes. The trailing code list can be
9033 empty, for a more vague error message. FILE, LINE, and FUNCTION
9034 are of the caller. */
9035
9036 void
9037 tree_check_failed (const_tree node, const char *file,
9038 int line, const char *function, ...)
9039 {
9040 va_list args;
9041 const char *buffer;
9042 unsigned length = 0;
9043 enum tree_code code;
9044
9045 va_start (args, function);
9046 while ((code = (enum tree_code) va_arg (args, int)))
9047 length += 4 + strlen (get_tree_code_name (code));
9048 va_end (args);
9049 if (length)
9050 {
9051 char *tmp;
9052 va_start (args, function);
9053 length += strlen ("expected ");
9054 buffer = tmp = (char *) alloca (length);
9055 length = 0;
9056 while ((code = (enum tree_code) va_arg (args, int)))
9057 {
9058 const char *prefix = length ? " or " : "expected ";
9059
9060 strcpy (tmp + length, prefix);
9061 length += strlen (prefix);
9062 strcpy (tmp + length, get_tree_code_name (code));
9063 length += strlen (get_tree_code_name (code));
9064 }
9065 va_end (args);
9066 }
9067 else
9068 buffer = "unexpected node";
9069
9070 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9071 buffer, get_tree_code_name (TREE_CODE (node)),
9072 function, trim_filename (file), line);
9073 }
9074
9075 /* Complain that the tree code of NODE matches one of the codes in the
9076 0-terminated list of trailing codes, even though it should not.
9077 FILE, LINE, and FUNCTION are of the caller. */
9078
9079 void
9080 tree_not_check_failed (const_tree node, const char *file,
9081 int line, const char *function, ...)
9082 {
9083 va_list args;
9084 char *buffer;
9085 unsigned length = 0;
9086 enum tree_code code;
9087
9088 va_start (args, function);
9089 while ((code = (enum tree_code) va_arg (args, int)))
9090 length += 4 + strlen (get_tree_code_name (code));
9091 va_end (args);
9092 va_start (args, function);
9093 buffer = (char *) alloca (length);
9094 length = 0;
9095 while ((code = (enum tree_code) va_arg (args, int)))
9096 {
9097 if (length)
9098 {
9099 strcpy (buffer + length, " or ");
9100 length += 4;
9101 }
9102 strcpy (buffer + length, get_tree_code_name (code));
9103 length += strlen (get_tree_code_name (code));
9104 }
9105 va_end (args);
9106
9107 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9108 buffer, get_tree_code_name (TREE_CODE (node)),
9109 function, trim_filename (file), line);
9110 }
9111
9112 /* Similar to tree_check_failed, except that we check for a class of tree
9113 code, given in CL. */
9114
9115 void
9116 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9117 const char *file, int line, const char *function)
9118 {
9119 internal_error
9120 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9121 TREE_CODE_CLASS_STRING (cl),
9122 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9123 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9124 }
9125
9126 /* Similar to tree_check_failed, except that instead of specifying a
9127 dozen codes, use the knowledge that they're all sequential. */
9128
9129 void
9130 tree_range_check_failed (const_tree node, const char *file, int line,
9131 const char *function, enum tree_code c1,
9132 enum tree_code c2)
9133 {
9134 char *buffer;
9135 unsigned length = 0;
9136 unsigned int c;
9137
9138 for (c = c1; c <= c2; ++c)
9139 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9140
9141 length += strlen ("expected ");
9142 buffer = (char *) alloca (length);
9143 length = 0;
9144
9145 for (c = c1; c <= c2; ++c)
9146 {
9147 const char *prefix = length ? " or " : "expected ";
9148
9149 strcpy (buffer + length, prefix);
9150 length += strlen (prefix);
9151 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9152 length += strlen (get_tree_code_name ((enum tree_code) c));
9153 }
9154
9155 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9156 buffer, get_tree_code_name (TREE_CODE (node)),
9157 function, trim_filename (file), line);
9158 }
9159
9160
9161 /* Similar to tree_check_failed, except that we check that a tree does
9162 not belong to the specified class, given in CL. */
9163
9164 void
9165 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9166 const char *file, int line, const char *function)
9167 {
9168 internal_error
9169 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9170 TREE_CODE_CLASS_STRING (cl),
9171 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9172 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9173 }
9174
9175
9176 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9177
9178 void
9179 omp_clause_check_failed (const_tree node, const char *file, int line,
9180 const char *function, enum omp_clause_code code)
9181 {
9182 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9183 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9184 function, trim_filename (file), line);
9185 }
9186
9187
9188 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9189
9190 void
9191 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9192 const char *function, enum omp_clause_code c1,
9193 enum omp_clause_code c2)
9194 {
9195 char *buffer;
9196 unsigned length = 0;
9197 unsigned int c;
9198
9199 for (c = c1; c <= c2; ++c)
9200 length += 4 + strlen (omp_clause_code_name[c]);
9201
9202 length += strlen ("expected ");
9203 buffer = (char *) alloca (length);
9204 length = 0;
9205
9206 for (c = c1; c <= c2; ++c)
9207 {
9208 const char *prefix = length ? " or " : "expected ";
9209
9210 strcpy (buffer + length, prefix);
9211 length += strlen (prefix);
9212 strcpy (buffer + length, omp_clause_code_name[c]);
9213 length += strlen (omp_clause_code_name[c]);
9214 }
9215
9216 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9217 buffer, omp_clause_code_name[TREE_CODE (node)],
9218 function, trim_filename (file), line);
9219 }
9220
9221
9222 #undef DEFTREESTRUCT
9223 #define DEFTREESTRUCT(VAL, NAME) NAME,
9224
9225 static const char *ts_enum_names[] = {
9226 #include "treestruct.def"
9227 };
9228 #undef DEFTREESTRUCT
9229
9230 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9231
9232 /* Similar to tree_class_check_failed, except that we check for
9233 whether CODE contains the tree structure identified by EN. */
9234
9235 void
9236 tree_contains_struct_check_failed (const_tree node,
9237 const enum tree_node_structure_enum en,
9238 const char *file, int line,
9239 const char *function)
9240 {
9241 internal_error
9242 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9243 TS_ENUM_NAME (en),
9244 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9245 }
9246
9247
9248 /* Similar to above, except that the check is for the bounds of a
9249 tree_int_cst's (dynamically sized) vector of elements. */
9250
9251 void
9252 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9253 const char *function)
9254 {
9255 internal_error
9256 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9257 idx + 1, len, function, trim_filename (file), line);
9258 }
9259
9260 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9261 (dynamically sized) vector. */
9262
9263 void
9264 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9265 const char *function)
9266 {
9267 internal_error
9268 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9269 idx + 1, len, function, trim_filename (file), line);
9270 }
9271
9272 /* Similar to above, except that the check is for the bounds of the operand
9273 vector of an expression node EXP. */
9274
9275 void
9276 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9277 int line, const char *function)
9278 {
9279 enum tree_code code = TREE_CODE (exp);
9280 internal_error
9281 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9282 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9283 function, trim_filename (file), line);
9284 }
9285
9286 /* Similar to above, except that the check is for the number of
9287 operands of an OMP_CLAUSE node. */
9288
9289 void
9290 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9291 int line, const char *function)
9292 {
9293 internal_error
9294 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9295 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9296 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9297 trim_filename (file), line);
9298 }
9299 #endif /* ENABLE_TREE_CHECKING */
9300 \f
9301 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9302 and mapped to the machine mode MODE. Initialize its fields and build
9303 the information necessary for debugging output. */
9304
9305 static tree
9306 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9307 {
9308 tree t;
9309 hashval_t hashcode = 0;
9310
9311 t = make_node (VECTOR_TYPE);
9312 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9313 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9314 SET_TYPE_MODE (t, mode);
9315
9316 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9317 SET_TYPE_STRUCTURAL_EQUALITY (t);
9318 else if (TYPE_CANONICAL (innertype) != innertype
9319 || mode != VOIDmode)
9320 TYPE_CANONICAL (t)
9321 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9322
9323 layout_type (t);
9324
9325 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9326 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9327 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9328 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9329 t = type_hash_canon (hashcode, t);
9330
9331 /* We have built a main variant, based on the main variant of the
9332 inner type. Use it to build the variant we return. */
9333 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9334 && TREE_TYPE (t) != innertype)
9335 return build_type_attribute_qual_variant (t,
9336 TYPE_ATTRIBUTES (innertype),
9337 TYPE_QUALS (innertype));
9338
9339 return t;
9340 }
9341
9342 static tree
9343 make_or_reuse_type (unsigned size, int unsignedp)
9344 {
9345 if (size == INT_TYPE_SIZE)
9346 return unsignedp ? unsigned_type_node : integer_type_node;
9347 if (size == CHAR_TYPE_SIZE)
9348 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9349 if (size == SHORT_TYPE_SIZE)
9350 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9351 if (size == LONG_TYPE_SIZE)
9352 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9353 if (size == LONG_LONG_TYPE_SIZE)
9354 return (unsignedp ? long_long_unsigned_type_node
9355 : long_long_integer_type_node);
9356 if (size == 128 && int128_integer_type_node)
9357 return (unsignedp ? int128_unsigned_type_node
9358 : int128_integer_type_node);
9359
9360 if (unsignedp)
9361 return make_unsigned_type (size);
9362 else
9363 return make_signed_type (size);
9364 }
9365
9366 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9367
9368 static tree
9369 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9370 {
9371 if (satp)
9372 {
9373 if (size == SHORT_FRACT_TYPE_SIZE)
9374 return unsignedp ? sat_unsigned_short_fract_type_node
9375 : sat_short_fract_type_node;
9376 if (size == FRACT_TYPE_SIZE)
9377 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9378 if (size == LONG_FRACT_TYPE_SIZE)
9379 return unsignedp ? sat_unsigned_long_fract_type_node
9380 : sat_long_fract_type_node;
9381 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9382 return unsignedp ? sat_unsigned_long_long_fract_type_node
9383 : sat_long_long_fract_type_node;
9384 }
9385 else
9386 {
9387 if (size == SHORT_FRACT_TYPE_SIZE)
9388 return unsignedp ? unsigned_short_fract_type_node
9389 : short_fract_type_node;
9390 if (size == FRACT_TYPE_SIZE)
9391 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9392 if (size == LONG_FRACT_TYPE_SIZE)
9393 return unsignedp ? unsigned_long_fract_type_node
9394 : long_fract_type_node;
9395 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9396 return unsignedp ? unsigned_long_long_fract_type_node
9397 : long_long_fract_type_node;
9398 }
9399
9400 return make_fract_type (size, unsignedp, satp);
9401 }
9402
9403 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9404
9405 static tree
9406 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9407 {
9408 if (satp)
9409 {
9410 if (size == SHORT_ACCUM_TYPE_SIZE)
9411 return unsignedp ? sat_unsigned_short_accum_type_node
9412 : sat_short_accum_type_node;
9413 if (size == ACCUM_TYPE_SIZE)
9414 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9415 if (size == LONG_ACCUM_TYPE_SIZE)
9416 return unsignedp ? sat_unsigned_long_accum_type_node
9417 : sat_long_accum_type_node;
9418 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9419 return unsignedp ? sat_unsigned_long_long_accum_type_node
9420 : sat_long_long_accum_type_node;
9421 }
9422 else
9423 {
9424 if (size == SHORT_ACCUM_TYPE_SIZE)
9425 return unsignedp ? unsigned_short_accum_type_node
9426 : short_accum_type_node;
9427 if (size == ACCUM_TYPE_SIZE)
9428 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9429 if (size == LONG_ACCUM_TYPE_SIZE)
9430 return unsignedp ? unsigned_long_accum_type_node
9431 : long_accum_type_node;
9432 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9433 return unsignedp ? unsigned_long_long_accum_type_node
9434 : long_long_accum_type_node;
9435 }
9436
9437 return make_accum_type (size, unsignedp, satp);
9438 }
9439
9440
9441 /* Create an atomic variant node for TYPE. This routine is called
9442 during initialization of data types to create the 5 basic atomic
9443 types. The generic build_variant_type function requires these to
9444 already be set up in order to function properly, so cannot be
9445 called from there. */
9446
9447 static tree
9448 build_atomic_base (tree type)
9449 {
9450 tree t;
9451
9452 /* Make sure it's not already registered. */
9453 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9454 return t;
9455
9456 t = build_variant_type_copy (type);
9457 set_type_quals (t, TYPE_QUAL_ATOMIC);
9458
9459 return t;
9460 }
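
/* Illustrative sketch (not part of the original source): once the basic
   atomic variants above exist, an atomic-qualified copy of a type can be
   obtained through the regular qualified-type machinery.  The helper name
   below is hypothetical.  */
#if 0
static tree
get_atomic_variant_example (tree type)
{
  /* get_qualified_type scans the variant list of TYPE's main variant and
     returns an existing atomic variant, or NULL_TREE if there is none.  */
  tree t = get_qualified_type (type, TYPE_QUAL_ATOMIC);
  if (t)
    return t;
  /* Otherwise build a new qualified variant.  */
  return build_qualified_type (type, TYPE_QUAL_ATOMIC);
}
#endif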
9461
9462 /* Create nodes for all integer types (and error_mark_node) using the sizes
9463 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9464 SHORT_DOUBLE specifies whether double should be of the same precision
9465 as float. */
9466
9467 void
9468 build_common_tree_nodes (bool signed_char, bool short_double)
9469 {
9470 error_mark_node = make_node (ERROR_MARK);
9471 TREE_TYPE (error_mark_node) = error_mark_node;
9472
9473 initialize_sizetypes ();
9474
9475 /* Define both `signed char' and `unsigned char'. */
9476 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9477 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9478 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9479 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9480
9481 /* Define `char', which is like either `signed char' or `unsigned char'
9482 but not the same as either. */
9483 char_type_node
9484 = (signed_char
9485 ? make_signed_type (CHAR_TYPE_SIZE)
9486 : make_unsigned_type (CHAR_TYPE_SIZE));
9487 TYPE_STRING_FLAG (char_type_node) = 1;
9488
9489 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9490 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9491 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9492 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9493 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9494 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9495 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9496 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9497 #if HOST_BITS_PER_WIDE_INT >= 64
9498 /* TODO: This isn't correct: the logic currently depends on the
9499 host's wide integers instead of the target's.
9500 If there is a target that does not support TImode but has a 128-bit
9501 integer scalar register, this target check needs to be adjusted. */
9502 if (targetm.scalar_mode_supported_p (TImode))
9503 {
9504 int128_integer_type_node = make_signed_type (128);
9505 int128_unsigned_type_node = make_unsigned_type (128);
9506 }
9507 #endif
9508
9509 /* Define a boolean type. This type only represents boolean values but
9510 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9511 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9512 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9513 TYPE_PRECISION (boolean_type_node) = 1;
9514 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9515
9516 /* Define what type to use for size_t. */
9517 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9518 size_type_node = unsigned_type_node;
9519 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9520 size_type_node = long_unsigned_type_node;
9521 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9522 size_type_node = long_long_unsigned_type_node;
9523 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9524 size_type_node = short_unsigned_type_node;
9525 else
9526 gcc_unreachable ();
9527
9528 /* Fill in the rest of the sized types. Reuse existing type nodes
9529 when possible. */
9530 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9531 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9532 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9533 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9534 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9535
9536 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9537 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9538 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9539 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9540 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9541
9542 /* Don't call build_qualified type for atomics. That routine does
9543 special processing for atomics, and until they are initialized
9544 it's better not to make that call. */
9545
9546 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node);
9547 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node);
9548 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node);
9549 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node);
9550 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node);
9551
9552 access_public_node = get_identifier ("public");
9553 access_protected_node = get_identifier ("protected");
9554 access_private_node = get_identifier ("private");
9555
9556 /* Define these next since the types below may use them. */
9557 integer_zero_node = build_int_cst (integer_type_node, 0);
9558 integer_one_node = build_int_cst (integer_type_node, 1);
9559 integer_three_node = build_int_cst (integer_type_node, 3);
9560 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9561
9562 size_zero_node = size_int (0);
9563 size_one_node = size_int (1);
9564 bitsize_zero_node = bitsize_int (0);
9565 bitsize_one_node = bitsize_int (1);
9566 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9567
9568 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9569 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9570
9571 void_type_node = make_node (VOID_TYPE);
9572 layout_type (void_type_node);
9573
9574 pointer_bounds_type_node = targetm.chkp_bound_type ();
9575
9576 /* We are not going to have real types in C with less than byte alignment,
9577 so we might as well not have any types that claim to have it. */
9578 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9579 TYPE_USER_ALIGN (void_type_node) = 0;
9580
9581 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9582 layout_type (TREE_TYPE (null_pointer_node));
9583
9584 ptr_type_node = build_pointer_type (void_type_node);
9585 const_ptr_type_node
9586 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9587 fileptr_type_node = ptr_type_node;
9588
9589 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9590
9591 float_type_node = make_node (REAL_TYPE);
9592 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9593 layout_type (float_type_node);
9594
9595 double_type_node = make_node (REAL_TYPE);
9596 if (short_double)
9597 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9598 else
9599 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9600 layout_type (double_type_node);
9601
9602 long_double_type_node = make_node (REAL_TYPE);
9603 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9604 layout_type (long_double_type_node);
9605
9606 float_ptr_type_node = build_pointer_type (float_type_node);
9607 double_ptr_type_node = build_pointer_type (double_type_node);
9608 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9609 integer_ptr_type_node = build_pointer_type (integer_type_node);
9610
9611 /* Fixed size integer types. */
9612 uint16_type_node = build_nonstandard_integer_type (16, true);
9613 uint32_type_node = build_nonstandard_integer_type (32, true);
9614 uint64_type_node = build_nonstandard_integer_type (64, true);
9615
9616 /* Decimal float types. */
9617 dfloat32_type_node = make_node (REAL_TYPE);
9618 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9619 layout_type (dfloat32_type_node);
9620 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9621 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9622
9623 dfloat64_type_node = make_node (REAL_TYPE);
9624 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9625 layout_type (dfloat64_type_node);
9626 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9627 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9628
9629 dfloat128_type_node = make_node (REAL_TYPE);
9630 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9631 layout_type (dfloat128_type_node);
9632 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9633 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9634
9635 complex_integer_type_node = build_complex_type (integer_type_node);
9636 complex_float_type_node = build_complex_type (float_type_node);
9637 complex_double_type_node = build_complex_type (double_type_node);
9638 complex_long_double_type_node = build_complex_type (long_double_type_node);
9639
9640 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9641 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9642 sat_ ## KIND ## _type_node = \
9643 make_sat_signed_ ## KIND ## _type (SIZE); \
9644 sat_unsigned_ ## KIND ## _type_node = \
9645 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9646 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9647 unsigned_ ## KIND ## _type_node = \
9648 make_unsigned_ ## KIND ## _type (SIZE);
9649
9650 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9651 sat_ ## WIDTH ## KIND ## _type_node = \
9652 make_sat_signed_ ## KIND ## _type (SIZE); \
9653 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9654 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9655 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9656 unsigned_ ## WIDTH ## KIND ## _type_node = \
9657 make_unsigned_ ## KIND ## _type (SIZE);
9658
9659 /* Make fixed-point type nodes based on four different widths. */
9660 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9661 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9662 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9663 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9664 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9665
9666 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9667 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9668 NAME ## _type_node = \
9669 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9670 u ## NAME ## _type_node = \
9671 make_or_reuse_unsigned_ ## KIND ## _type \
9672 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9673 sat_ ## NAME ## _type_node = \
9674 make_or_reuse_sat_signed_ ## KIND ## _type \
9675 (GET_MODE_BITSIZE (MODE ## mode)); \
9676 sat_u ## NAME ## _type_node = \
9677 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9678 (GET_MODE_BITSIZE (U ## MODE ## mode));
9679
9680 /* Fixed-point type and mode nodes. */
9681 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9682 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9683 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9684 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9685 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9686 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9687 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9688 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9689 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9690 MAKE_FIXED_MODE_NODE (accum, da, DA)
9691 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9692
9693 {
9694 tree t = targetm.build_builtin_va_list ();
9695
9696 /* Many back-ends define record types without setting TYPE_NAME.
9697 If we copied the record type here, we'd keep the original
9698 record type without a name. This breaks name mangling. So,
9699 don't copy record types and let c_common_nodes_and_builtins()
9700 declare the type to be __builtin_va_list. */
9701 if (TREE_CODE (t) != RECORD_TYPE)
9702 t = build_variant_type_copy (t);
9703
9704 va_list_type_node = t;
9705 }
9706 }
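
/* Illustrative sketch (not part of the original source): a language front
   end calls build_common_tree_nodes early in its initialization and can
   then rely on the common nodes.  The flag values and the helper name
   below are hypothetical.  */
#if 0
static void
frontend_init_types_example (void)
{
  /* Plain "char" is signed; "double" keeps its normal precision.  */
  build_common_tree_nodes (true, false);

  /* The common nodes can now be used to build derived types and
     constants.  */
  tree int_ptr = build_pointer_type (integer_type_node);
  tree three = build_int_cst (integer_type_node, 3);
  (void) int_ptr;
  (void) three;
}
#endif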
9707
9708 /* Modify DECL for given flags.
9709 TM_PURE attribute is set only on types, so the function will modify
9710 DECL's type when ECF_TM_PURE is used. */
9711
9712 void
9713 set_call_expr_flags (tree decl, int flags)
9714 {
9715 if (flags & ECF_NOTHROW)
9716 TREE_NOTHROW (decl) = 1;
9717 if (flags & ECF_CONST)
9718 TREE_READONLY (decl) = 1;
9719 if (flags & ECF_PURE)
9720 DECL_PURE_P (decl) = 1;
9721 if (flags & ECF_LOOPING_CONST_OR_PURE)
9722 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9723 if (flags & ECF_NOVOPS)
9724 DECL_IS_NOVOPS (decl) = 1;
9725 if (flags & ECF_NORETURN)
9726 TREE_THIS_VOLATILE (decl) = 1;
9727 if (flags & ECF_MALLOC)
9728 DECL_IS_MALLOC (decl) = 1;
9729 if (flags & ECF_RETURNS_TWICE)
9730 DECL_IS_RETURNS_TWICE (decl) = 1;
9731 if (flags & ECF_LEAF)
9732 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9733 NULL, DECL_ATTRIBUTES (decl));
9734 if ((flags & ECF_TM_PURE) && flag_tm)
9735 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9736 /* Looping const or pure is implied by noreturn.
9737 There is currently no way to declare looping const or looping pure alone. */
9738 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9739 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9740 }
9741
9742
9743 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9744
9745 static void
9746 local_define_builtin (const char *name, tree type, enum built_in_function code,
9747 const char *library_name, int ecf_flags)
9748 {
9749 tree decl;
9750
9751 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9752 library_name, NULL_TREE);
9753 set_call_expr_flags (decl, ecf_flags);
9754
9755 set_builtin_decl (code, decl, true);
9756 }
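
/* Illustrative sketch (not part of the original source): how a builtin is
   registered with the helper above.  The builtin name, function code and
   library name used here are invented for illustration.  */
#if 0
static void
define_example_builtin (void)
{
  tree ftype = build_function_type_list (ptr_type_node, size_type_node,
					 NULL_TREE);
  local_define_builtin ("__builtin_example", ftype, BUILT_IN_EXAMPLE,
			"example", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
}
#endif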
9757
9758 /* Call this function after instantiating all builtins that the language
9759 front end cares about. This will build the rest of the builtins that
9760 are relied upon by the tree optimizers and the middle-end. */
9761
9762 void
9763 build_common_builtin_nodes (void)
9764 {
9765 tree tmp, ftype;
9766 int ecf_flags;
9767
9768 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9769 {
9770 ftype = build_function_type (void_type_node, void_list_node);
9771 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9772 "__builtin_unreachable",
9773 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9774 | ECF_CONST);
9775 }
9776
9777 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9778 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9779 {
9780 ftype = build_function_type_list (ptr_type_node,
9781 ptr_type_node, const_ptr_type_node,
9782 size_type_node, NULL_TREE);
9783
9784 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9785 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9786 "memcpy", ECF_NOTHROW | ECF_LEAF);
9787 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9788 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9789 "memmove", ECF_NOTHROW | ECF_LEAF);
9790 }
9791
9792 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9793 {
9794 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9795 const_ptr_type_node, size_type_node,
9796 NULL_TREE);
9797 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9798 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9799 }
9800
9801 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9802 {
9803 ftype = build_function_type_list (ptr_type_node,
9804 ptr_type_node, integer_type_node,
9805 size_type_node, NULL_TREE);
9806 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9807 "memset", ECF_NOTHROW | ECF_LEAF);
9808 }
9809
9810 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9811 {
9812 ftype = build_function_type_list (ptr_type_node,
9813 size_type_node, NULL_TREE);
9814 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9815 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9816 }
9817
9818 ftype = build_function_type_list (ptr_type_node, size_type_node,
9819 size_type_node, NULL_TREE);
9820 local_define_builtin ("__builtin_alloca_with_align", ftype,
9821 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9822 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9823
9824 /* If we're checking the stack, `alloca' can throw. */
9825 if (flag_stack_check)
9826 {
9827 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9828 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9829 }
9830
9831 ftype = build_function_type_list (void_type_node,
9832 ptr_type_node, ptr_type_node,
9833 ptr_type_node, NULL_TREE);
9834 local_define_builtin ("__builtin_init_trampoline", ftype,
9835 BUILT_IN_INIT_TRAMPOLINE,
9836 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9837 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9838 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9839 "__builtin_init_heap_trampoline",
9840 ECF_NOTHROW | ECF_LEAF);
9841
9842 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9843 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9844 BUILT_IN_ADJUST_TRAMPOLINE,
9845 "__builtin_adjust_trampoline",
9846 ECF_CONST | ECF_NOTHROW);
9847
9848 ftype = build_function_type_list (void_type_node,
9849 ptr_type_node, ptr_type_node, NULL_TREE);
9850 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9851 BUILT_IN_NONLOCAL_GOTO,
9852 "__builtin_nonlocal_goto",
9853 ECF_NORETURN | ECF_NOTHROW);
9854
9855 ftype = build_function_type_list (void_type_node,
9856 ptr_type_node, ptr_type_node, NULL_TREE);
9857 local_define_builtin ("__builtin_setjmp_setup", ftype,
9858 BUILT_IN_SETJMP_SETUP,
9859 "__builtin_setjmp_setup", ECF_NOTHROW);
9860
9861 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9862 local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
9863 BUILT_IN_SETJMP_DISPATCHER,
9864 "__builtin_setjmp_dispatcher",
9865 ECF_PURE | ECF_NOTHROW);
9866
9867 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9868 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9869 BUILT_IN_SETJMP_RECEIVER,
9870 "__builtin_setjmp_receiver", ECF_NOTHROW);
9871
9872 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9873 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9874 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9875
9876 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9877 local_define_builtin ("__builtin_stack_restore", ftype,
9878 BUILT_IN_STACK_RESTORE,
9879 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9880
9881 /* If there's a possibility that we might use the ARM EABI, build the
9882 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9883 if (targetm.arm_eabi_unwinder)
9884 {
9885 ftype = build_function_type_list (void_type_node, NULL_TREE);
9886 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9887 BUILT_IN_CXA_END_CLEANUP,
9888 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9889 }
9890
9891 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9892 local_define_builtin ("__builtin_unwind_resume", ftype,
9893 BUILT_IN_UNWIND_RESUME,
9894 ((targetm_common.except_unwind_info (&global_options)
9895 == UI_SJLJ)
9896 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9897 ECF_NORETURN);
9898
9899 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9900 {
9901 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9902 NULL_TREE);
9903 local_define_builtin ("__builtin_return_address", ftype,
9904 BUILT_IN_RETURN_ADDRESS,
9905 "__builtin_return_address",
9906 ECF_NOTHROW);
9907 }
9908
9909 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9910 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9911 {
9912 ftype = build_function_type_list (void_type_node, ptr_type_node,
9913 ptr_type_node, NULL_TREE);
9914 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9915 local_define_builtin ("__cyg_profile_func_enter", ftype,
9916 BUILT_IN_PROFILE_FUNC_ENTER,
9917 "__cyg_profile_func_enter", 0);
9918 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9919 local_define_builtin ("__cyg_profile_func_exit", ftype,
9920 BUILT_IN_PROFILE_FUNC_EXIT,
9921 "__cyg_profile_func_exit", 0);
9922 }
9923
9924 /* The exception object and filter values from the runtime. The argument
9925 must be zero before exception lowering, i.e. from the front end. After
9926 exception lowering, it will be the region number for the exception
9927 landing pad. These functions are PURE instead of CONST to prevent
9928 them from being hoisted past the exception edge that will initialize
9929 its value in the landing pad. */
9930 ftype = build_function_type_list (ptr_type_node,
9931 integer_type_node, NULL_TREE);
9932 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9933 /* Only use TM_PURE if we have TM language support. */
9934 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9935 ecf_flags |= ECF_TM_PURE;
9936 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9937 "__builtin_eh_pointer", ecf_flags);
9938
9939 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9940 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9941 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9942 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9943
9944 ftype = build_function_type_list (void_type_node,
9945 integer_type_node, integer_type_node,
9946 NULL_TREE);
9947 local_define_builtin ("__builtin_eh_copy_values", ftype,
9948 BUILT_IN_EH_COPY_VALUES,
9949 "__builtin_eh_copy_values", ECF_NOTHROW);
9950
9951 /* Complex multiplication and division. These are handled as builtins
9952 rather than optabs because emit_library_call_value doesn't support
9953 complex. Further, we can do slightly better with folding these
9954 beasties if the real and imaginary parts of the arguments are separate. */
9955 {
9956 int mode;
9957
9958 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9959 {
9960 char mode_name_buf[4], *q;
9961 const char *p;
9962 enum built_in_function mcode, dcode;
9963 tree type, inner_type;
9964 const char *prefix = "__";
9965
9966 if (targetm.libfunc_gnu_prefix)
9967 prefix = "__gnu_";
9968
9969 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
9970 if (type == NULL)
9971 continue;
9972 inner_type = TREE_TYPE (type);
9973
9974 ftype = build_function_type_list (type, inner_type, inner_type,
9975 inner_type, inner_type, NULL_TREE);
9976
9977 mcode = ((enum built_in_function)
9978 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9979 dcode = ((enum built_in_function)
9980 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9981
9982 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9983 *q = TOLOWER (*p);
9984 *q = '\0';
9985
9986 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9987 NULL);
9988 local_define_builtin (built_in_names[mcode], ftype, mcode,
9989 built_in_names[mcode],
9990 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
9991
9992 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9993 NULL);
9994 local_define_builtin (built_in_names[dcode], ftype, dcode,
9995 built_in_names[dcode],
9996 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
9997 }
9998 }
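  /* For example, for SCmode (complex float) the loop above registers
     builtins named "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" and
     "__gnu_divsc3" when the target uses the GNU libfunc prefix), matching
     the libgcc routines of the same names.  */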
9999 }
10000
10001 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10002 better way.
10003
10004 If we requested a pointer to a vector, build up the pointers that
10005 we stripped off while looking for the inner type. Similarly for
10006 return values from functions.
10007
10008 The argument TYPE is the top of the chain, and BOTTOM is the
10009 new type which we will point to. */
10010
10011 tree
10012 reconstruct_complex_type (tree type, tree bottom)
10013 {
10014 tree inner, outer;
10015
10016 if (TREE_CODE (type) == POINTER_TYPE)
10017 {
10018 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10019 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10020 TYPE_REF_CAN_ALIAS_ALL (type));
10021 }
10022 else if (TREE_CODE (type) == REFERENCE_TYPE)
10023 {
10024 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10025 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10026 TYPE_REF_CAN_ALIAS_ALL (type));
10027 }
10028 else if (TREE_CODE (type) == ARRAY_TYPE)
10029 {
10030 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10031 outer = build_array_type (inner, TYPE_DOMAIN (type));
10032 }
10033 else if (TREE_CODE (type) == FUNCTION_TYPE)
10034 {
10035 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10036 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10037 }
10038 else if (TREE_CODE (type) == METHOD_TYPE)
10039 {
10040 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10041 /* The build_method_type_directly() routine prepends 'this' to the argument
10042 list, so we must compensate by getting rid of it. */
10043 outer
10044 = build_method_type_directly
10045 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10046 inner,
10047 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10048 }
10049 else if (TREE_CODE (type) == OFFSET_TYPE)
10050 {
10051 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10052 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10053 }
10054 else
10055 return bottom;
10056
10057 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10058 TYPE_QUALS (type));
10059 }
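
/* Illustrative sketch (not part of the original source): given the type
   "float *" and a vector-of-4-floats BOTTOM, reconstruct_complex_type
   rebuilds the stripped pointer around the vector type, yielding a pointer
   to the vector.  The helper name is hypothetical.  */
#if 0
static tree
vector_pointer_example (tree float_ptr_type)
{
  tree vec4 = build_vector_type (float_type_node, 4);
  return reconstruct_complex_type (float_ptr_type, vec4);
}
#endif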
10060
10061 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10062 the inner type. */
10063 tree
10064 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10065 {
10066 int nunits;
10067
10068 switch (GET_MODE_CLASS (mode))
10069 {
10070 case MODE_VECTOR_INT:
10071 case MODE_VECTOR_FLOAT:
10072 case MODE_VECTOR_FRACT:
10073 case MODE_VECTOR_UFRACT:
10074 case MODE_VECTOR_ACCUM:
10075 case MODE_VECTOR_UACCUM:
10076 nunits = GET_MODE_NUNITS (mode);
10077 break;
10078
10079 case MODE_INT:
10080 /* Check that there are no leftover bits. */
10081 gcc_assert (GET_MODE_BITSIZE (mode)
10082 % tree_to_hwi (TYPE_SIZE (innertype)) == 0);
10083
10084 nunits = GET_MODE_BITSIZE (mode)
10085 / tree_to_hwi (TYPE_SIZE (innertype));
10086 break;
10087
10088 default:
10089 gcc_unreachable ();
10090 }
10091
10092 return make_vector_type (innertype, nunits, mode);
10093 }
10094
10095 /* Similarly, but takes the inner type and number of units, which must be
10096 a power of two. */
10097
10098 tree
10099 build_vector_type (tree innertype, int nunits)
10100 {
10101 return make_vector_type (innertype, nunits, VOIDmode);
10102 }
10103
10104 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10105
10106 tree
10107 build_opaque_vector_type (tree innertype, int nunits)
10108 {
10109 tree t = make_vector_type (innertype, nunits, VOIDmode);
10110 tree cand;
10111 /* We always build the non-opaque variant before the opaque one,
10112 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10113 cand = TYPE_NEXT_VARIANT (t);
10114 if (cand
10115 && TYPE_VECTOR_OPAQUE (cand)
10116 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10117 return cand;
10118 /* Otherwise build a variant type and make sure to queue it after
10119 the non-opaque type. */
10120 cand = build_distinct_type_copy (t);
10121 TYPE_VECTOR_OPAQUE (cand) = true;
10122 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10123 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10124 TYPE_NEXT_VARIANT (t) = cand;
10125 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10126 return cand;
10127 }
10128
10129
10130 /* Given an initializer INIT, return TRUE if INIT is zero or some
10131 aggregate of zeros. Otherwise return FALSE. */
10132 bool
10133 initializer_zerop (const_tree init)
10134 {
10135 tree elt;
10136
10137 STRIP_NOPS (init);
10138
10139 switch (TREE_CODE (init))
10140 {
10141 case INTEGER_CST:
10142 return integer_zerop (init);
10143
10144 case REAL_CST:
10145 /* ??? Note that this is not correct for C4X float formats. There,
10146 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10147 negative exponent. */
10148 return real_zerop (init)
10149 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10150
10151 case FIXED_CST:
10152 return fixed_zerop (init);
10153
10154 case COMPLEX_CST:
10155 return integer_zerop (init)
10156 || (real_zerop (init)
10157 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10158 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10159
10160 case VECTOR_CST:
10161 {
10162 unsigned i;
10163 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10164 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10165 return false;
10166 return true;
10167 }
10168
10169 case CONSTRUCTOR:
10170 {
10171 unsigned HOST_WIDE_INT idx;
10172
10173 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10174 if (!initializer_zerop (elt))
10175 return false;
10176 return true;
10177 }
10178
10179 case STRING_CST:
10180 {
10181 int i;
10182
10183 /* We need to loop through all elements to handle cases like
10184 "\0" and "\0foobar". */
10185 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10186 if (TREE_STRING_POINTER (init)[i] != '\0')
10187 return false;
10188
10189 return true;
10190 }
10191
10192 default:
10193 return false;
10194 }
10195 }
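
/* Illustrative sketch (not part of the original source): a plain zero
   INTEGER_CST, and aggregates built entirely from such zeros, satisfy
   initializer_zerop, whereas a REAL_CST of -0.0 does not because its bit
   pattern is not all zeros.  The helper name is hypothetical.  */
#if 0
static bool
zero_initializer_example (void)
{
  tree zero = build_int_cst (integer_type_node, 0);
  return initializer_zerop (zero);	/* Returns true.  */
}
#endif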
10196
10197 /* Check if vector VEC consists of all equal elements and that the
10198 number of elements corresponds to the type of VEC.  The function
10199 returns the first element of the vector
10200 or NULL_TREE if the vector is not uniform. */
10201 tree
10202 uniform_vector_p (const_tree vec)
10203 {
10204 tree first, t;
10205 unsigned i;
10206
10207 if (vec == NULL_TREE)
10208 return NULL_TREE;
10209
10210 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10211
10212 if (TREE_CODE (vec) == VECTOR_CST)
10213 {
10214 first = VECTOR_CST_ELT (vec, 0);
10215 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10216 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10217 return NULL_TREE;
10218
10219 return first;
10220 }
10221
10222 else if (TREE_CODE (vec) == CONSTRUCTOR)
10223 {
10224 first = error_mark_node;
10225
10226 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10227 {
10228 if (i == 0)
10229 {
10230 first = t;
10231 continue;
10232 }
10233 if (!operand_equal_p (first, t, 0))
10234 return NULL_TREE;
10235 }
10236 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10237 return NULL_TREE;
10238
10239 return first;
10240 }
10241
10242 return NULL_TREE;
10243 }
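
/* Illustrative sketch (not part of the original source): detecting a
   "splat" constant.  A VECTOR_CST such as {2, 2, 2, 2} yields its first
   element, while {1, 2, 3, 4} yields NULL_TREE.  The helper name is
   hypothetical.  */
#if 0
static bool
is_splat_example (tree vec)
{
  return uniform_vector_p (vec) != NULL_TREE;
}
#endif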
10244
10245 /* Build an empty statement at location LOC. */
10246
10247 tree
10248 build_empty_stmt (location_t loc)
10249 {
10250 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10251 SET_EXPR_LOCATION (t, loc);
10252 return t;
10253 }
10254
10255
10256 /* Build an OpenMP clause with code CODE. LOC is the location of the
10257 clause. */
10258
10259 tree
10260 build_omp_clause (location_t loc, enum omp_clause_code code)
10261 {
10262 tree t;
10263 int size, length;
10264
10265 length = omp_clause_num_ops[code];
10266 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10267
10268 record_node_allocation_statistics (OMP_CLAUSE, size);
10269
10270 t = ggc_alloc_tree_node (size);
10271 memset (t, 0, size);
10272 TREE_SET_CODE (t, OMP_CLAUSE);
10273 OMP_CLAUSE_SET_CODE (t, code);
10274 OMP_CLAUSE_LOCATION (t) = loc;
10275
10276 return t;
10277 }
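
/* Illustrative sketch (not part of the original source): building a
   "private (DECL)" clause and chaining it onto an existing clause list,
   as the OpenMP front ends do.  The helper name is hypothetical.  */
#if 0
static tree
add_private_clause_example (location_t loc, tree decl, tree clauses)
{
  tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
  OMP_CLAUSE_DECL (c) = decl;
  OMP_CLAUSE_CHAIN (c) = clauses;
  return c;
}
#endif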
10278
10279 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10280 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10281 Except for the CODE and operand count field, other storage for the
10282 object is initialized to zeros. */
10283
10284 tree
10285 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10286 {
10287 tree t;
10288 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10289
10290 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10291 gcc_assert (len >= 1);
10292
10293 record_node_allocation_statistics (code, length);
10294
10295 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10296
10297 TREE_SET_CODE (t, code);
10298
10299 /* Can't use TREE_OPERAND to store the length because if checking is
10300 enabled, it will try to check the length before we store it. :-P */
10301 t->exp.operands[0] = build_int_cst (sizetype, len);
10302
10303 return t;
10304 }
10305
10306 /* Helper function for build_call_* functions; build a CALL_EXPR with
10307 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10308 the argument slots. */
10309
10310 static tree
10311 build_call_1 (tree return_type, tree fn, int nargs)
10312 {
10313 tree t;
10314
10315 t = build_vl_exp (CALL_EXPR, nargs + 3);
10316 TREE_TYPE (t) = return_type;
10317 CALL_EXPR_FN (t) = fn;
10318 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10319
10320 return t;
10321 }
10322
10323 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10324 FN and a null static chain slot. NARGS is the number of call arguments
10325 which are specified as "..." arguments. */
10326
10327 tree
10328 build_call_nary (tree return_type, tree fn, int nargs, ...)
10329 {
10330 tree ret;
10331 va_list args;
10332 va_start (args, nargs);
10333 ret = build_call_valist (return_type, fn, nargs, args);
10334 va_end (args);
10335 return ret;
10336 }
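
/* Illustrative sketch (not part of the original source): building a call
   to the memcpy builtin registered in build_common_builtin_nodes, using
   the varargs interface above.  The helper name is hypothetical.  */
#if 0
static tree
build_memcpy_call_example (tree dst, tree src, tree len)
{
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  /* CALL_EXPR_FN expects the address of the callee.  */
  tree fnaddr = build_fold_addr_expr (fndecl);
  return build_call_nary (ptr_type_node, fnaddr, 3, dst, src, len);
}
#endif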
10337
10338 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10339 FN and a null static chain slot. NARGS is the number of call arguments
10340 which are specified as a va_list ARGS. */
10341
10342 tree
10343 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10344 {
10345 tree t;
10346 int i;
10347
10348 t = build_call_1 (return_type, fn, nargs);
10349 for (i = 0; i < nargs; i++)
10350 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10351 process_call_operands (t);
10352 return t;
10353 }
10354
10355 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10356 FN and a null static chain slot. NARGS is the number of call arguments
10357 which are specified as a tree array ARGS. */
10358
10359 tree
10360 build_call_array_loc (location_t loc, tree return_type, tree fn,
10361 int nargs, const tree *args)
10362 {
10363 tree t;
10364 int i;
10365
10366 t = build_call_1 (return_type, fn, nargs);
10367 for (i = 0; i < nargs; i++)
10368 CALL_EXPR_ARG (t, i) = args[i];
10369 process_call_operands (t);
10370 SET_EXPR_LOCATION (t, loc);
10371 return t;
10372 }
10373
10374 /* Like build_call_array, but takes a vec. */
10375
10376 tree
10377 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10378 {
10379 tree ret, t;
10380 unsigned int ix;
10381
10382 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10383 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10384 CALL_EXPR_ARG (ret, ix) = t;
10385 process_call_operands (ret);
10386 return ret;
10387 }
10388
10389 /* Return true if T (assumed to be a DECL) must be assigned a memory
10390 location. */
10391
10392 bool
10393 needs_to_live_in_memory (const_tree t)
10394 {
10395 return (TREE_ADDRESSABLE (t)
10396 || is_global_var (t)
10397 || (TREE_CODE (t) == RESULT_DECL
10398 && !DECL_BY_REFERENCE (t)
10399 && aggregate_value_p (t, current_function_decl)));
10400 }
10401
10402 /* Return the value of the constant X, sign-extended. */
10403
10404 HOST_WIDE_INT
10405 int_cst_value (const_tree x)
10406 {
10407 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10408 unsigned HOST_WIDE_INT val = tree_to_hwi (x);
10409
10410 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10411 gcc_assert (cst_fits_shwi_p (x));
10412
10413 if (bits < HOST_BITS_PER_WIDE_INT)
10414 {
10415 bool negative = ((val >> (bits - 1)) & 1) != 0;
10416 if (negative)
10417 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10418 else
10419 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10420 }
10421
10422 return val;
10423 }
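
/* For example, for a constant of an 8-bit type whose low byte is 0xff the
   function above yields -1 (the value is sign-extended regardless of the
   signedness of the type), while a low byte of 0x7f yields 127.  */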
10424
10425 /* Return the value of the constant X as a HOST_WIDEST_INT, sign-extended. */
10426
10427 HOST_WIDEST_INT
10428 widest_int_cst_value (const_tree x)
10429 {
10430 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10431 unsigned HOST_WIDEST_INT val = tree_to_hwi (x);
10432
10433 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10434 gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
10435 gcc_assert (TREE_INT_CST_NUNITS (x) <= 2);
10436
10437 if (TREE_INT_CST_NUNITS (x) == 1)
10438 val = HOST_WIDE_INT (val);
10439 else
10440 val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_ELT (x, 1))
10441 << HOST_BITS_PER_WIDE_INT);
10442 #else
10443 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10444 gcc_assert (TREE_INT_CST_NUNITS (x) == 1);
10445 #endif
10446
10447 if (bits < HOST_BITS_PER_WIDEST_INT)
10448 {
10449 bool negative = ((val >> (bits - 1)) & 1) != 0;
10450 if (negative)
10451 val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
10452 else
10453 val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
10454 }
10455
10456 return val;
10457 }
10458
10459 /* If TYPE is an integral or pointer type, return an integer type with
10460 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10461 if TYPE is already an integer type of signedness UNSIGNEDP. */
10462
10463 tree
10464 signed_or_unsigned_type_for (int unsignedp, tree type)
10465 {
10466 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10467 return type;
10468
10469 if (TREE_CODE (type) == VECTOR_TYPE)
10470 {
10471 tree inner = TREE_TYPE (type);
10472 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10473 if (!inner2)
10474 return NULL_TREE;
10475 if (inner == inner2)
10476 return type;
10477 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10478 }
10479
10480 if (!INTEGRAL_TYPE_P (type)
10481 && !POINTER_TYPE_P (type))
10482 return NULL_TREE;
10483
10484 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10485 }
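
/* For example, given "int" and UNSIGNEDP == 1 the function above returns
   an unsigned integer type of the same precision; given a pointer type it
   returns an integer type of pointer precision; and given a vector of ints
   it returns a vector whose element type has the requested signedness.  */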
10486
10487 /* If TYPE is an integral or pointer type, return an integer type with
10488 the same precision which is unsigned, or itself if TYPE is already an
10489 unsigned integer type. */
10490
10491 tree
10492 unsigned_type_for (tree type)
10493 {
10494 return signed_or_unsigned_type_for (1, type);
10495 }
10496
10497 /* If TYPE is an integral or pointer type, return an integer type with
10498 the same precision which is signed, or itself if TYPE is already a
10499 signed integer type. */
10500
10501 tree
10502 signed_type_for (tree type)
10503 {
10504 return signed_or_unsigned_type_for (0, type);
10505 }
10506
10507 /* If TYPE is a vector type, return a signed integer vector type with the
10508 same width and number of subparts. Otherwise return boolean_type_node. */
10509
10510 tree
10511 truth_type_for (tree type)
10512 {
10513 if (TREE_CODE (type) == VECTOR_TYPE)
10514 {
10515 tree elem = lang_hooks.types.type_for_size
10516 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10517 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10518 }
10519 else
10520 return boolean_type_node;
10521 }
10522
10523 /* Returns the largest value obtainable by casting something in INNER type to
10524 OUTER type. */
10525
10526 tree
10527 upper_bound_in_type (tree outer, tree inner)
10528 {
10529 unsigned int det = 0;
10530 unsigned oprec = TYPE_PRECISION (outer);
10531 unsigned iprec = TYPE_PRECISION (inner);
10532 unsigned prec;
10533
10534 /* Compute a unique number for every combination. */
10535 det |= (oprec > iprec) ? 4 : 0;
10536 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10537 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10538
10539 /* Determine the exponent to use. */
10540 switch (det)
10541 {
10542 case 0:
10543 case 1:
10544 /* oprec <= iprec, outer: signed, inner: don't care. */
10545 prec = oprec - 1;
10546 break;
10547 case 2:
10548 case 3:
10549 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10550 prec = oprec;
10551 break;
10552 case 4:
10553 /* oprec > iprec, outer: signed, inner: signed. */
10554 prec = iprec - 1;
10555 break;
10556 case 5:
10557 /* oprec > iprec, outer: signed, inner: unsigned. */
10558 prec = iprec;
10559 break;
10560 case 6:
10561 /* oprec > iprec, outer: unsigned, inner: signed. */
10562 prec = oprec;
10563 break;
10564 case 7:
10565 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10566 prec = iprec;
10567 break;
10568 default:
10569 gcc_unreachable ();
10570 }
10571
10572 return wide_int_to_tree (outer,
10573 wi::mask (prec, false, TYPE_PRECISION (outer)));
10574 }
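
/* For example, with OUTER a 32-bit signed type and INNER an 8-bit signed
   type the result above is 127; with OUTER an 8-bit unsigned type and
   INNER a 32-bit signed type it is 255.  */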
10575
10576 /* Returns the smallest value obtainable by casting something in INNER type to
10577 OUTER type. */
10578
10579 tree
10580 lower_bound_in_type (tree outer, tree inner)
10581 {
10582 unsigned oprec = TYPE_PRECISION (outer);
10583 unsigned iprec = TYPE_PRECISION (inner);
10584
10585 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10586 and obtain 0. */
10587 if (TYPE_UNSIGNED (outer)
10588 /* If we are widening something of an unsigned type, OUTER type
10589 contains all values of INNER type. In particular, both INNER
10590 and OUTER types have zero in common. */
10591 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10592 return build_int_cst (outer, 0);
10593 else
10594 {
10595 /* If we are widening a signed type to another signed type, we
10596 want to obtain -2^(iprec-1). If we are keeping the
10597 precision or narrowing to a signed type, we want to obtain
10598 -2^(oprec-1). */
10599 unsigned prec = oprec > iprec ? iprec : oprec;
10600 return wide_int_to_tree (outer,
10601 wi::mask (prec - 1, true,
10602 TYPE_PRECISION (outer)));
10603 }
10604 }
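
/* For example, with OUTER a 32-bit signed type and INNER an 8-bit signed
   type the result above is -128; whenever OUTER is unsigned the result
   is 0.  */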
10605
10606 /* Return nonzero if two operands that are suitable for PHI nodes are
10607 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10608 SSA_NAME or invariant. Note that this is strictly an optimization.
10609 That is, callers of this function can directly call operand_equal_p
10610 and get the same result, only slower. */
10611
10612 int
10613 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10614 {
10615 if (arg0 == arg1)
10616 return 1;
10617 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10618 return 0;
10619 return operand_equal_p (arg0, arg1, 0);
10620 }
10621
10622 /* Returns the number of zeros at the end of the binary representation of X. */
10623
10624 tree
10625 num_ending_zeros (const_tree x)
10626 {
10627 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10628 }
10629
10630
10631 #define WALK_SUBTREE(NODE) \
10632 do \
10633 { \
10634 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10635 if (result) \
10636 return result; \
10637 } \
10638 while (0)
10639
10640 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10641 to be walked whenever a type is seen in the tree.  The rest of the operands
10642 and the return value are as for walk_tree. */
10643
10644 static tree
10645 walk_type_fields (tree type, walk_tree_fn func, void *data,
10646 struct pointer_set_t *pset, walk_tree_lh lh)
10647 {
10648 tree result = NULL_TREE;
10649
10650 switch (TREE_CODE (type))
10651 {
10652 case POINTER_TYPE:
10653 case REFERENCE_TYPE:
10654 /* We have to worry about mutually recursive pointers. These can't
10655 be written in C. They can in Ada. It's pathological, but
10656 there's an ACATS test (c38102a) that checks it. Deal with this
10657 by checking if we're pointing to another pointer, that one
10658 points to another pointer, that one does too, and we have no htab.
10659 If so, get a hash table. We check three levels deep to avoid
10660 the cost of the hash table if we don't need one. */
10661 if (POINTER_TYPE_P (TREE_TYPE (type))
10662 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10663 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10664 && !pset)
10665 {
10666 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10667 func, data);
10668 if (result)
10669 return result;
10670
10671 break;
10672 }
10673
10674 /* ... fall through ... */
10675
10676 case COMPLEX_TYPE:
10677 WALK_SUBTREE (TREE_TYPE (type));
10678 break;
10679
10680 case METHOD_TYPE:
10681 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10682
10683 /* Fall through. */
10684
10685 case FUNCTION_TYPE:
10686 WALK_SUBTREE (TREE_TYPE (type));
10687 {
10688 tree arg;
10689
10690 /* We never want to walk into default arguments. */
10691 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10692 WALK_SUBTREE (TREE_VALUE (arg));
10693 }
10694 break;
10695
10696 case ARRAY_TYPE:
10697 /* Don't follow this node's type if it is a pointer, for fear that
10698 we'll have infinite recursion.  If we have a PSET, then we
10699 need not fear. */
10700 if (pset
10701 || (!POINTER_TYPE_P (TREE_TYPE (type))
10702 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10703 WALK_SUBTREE (TREE_TYPE (type));
10704 WALK_SUBTREE (TYPE_DOMAIN (type));
10705 break;
10706
10707 case OFFSET_TYPE:
10708 WALK_SUBTREE (TREE_TYPE (type));
10709 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10710 break;
10711
10712 default:
10713 break;
10714 }
10715
10716 return NULL_TREE;
10717 }
10718
10719 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10720 called with the DATA and the address of each sub-tree. If FUNC returns a
10721 non-NULL value, the traversal is stopped, and the value returned by FUNC
10722 is returned. If PSET is non-NULL it is used to record the nodes visited,
10723 and to avoid visiting a node more than once. */
10724
10725 tree
10726 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10727 struct pointer_set_t *pset, walk_tree_lh lh)
10728 {
10729 enum tree_code code;
10730 int walk_subtrees;
10731 tree result;
10732
10733 #define WALK_SUBTREE_TAIL(NODE) \
10734 do \
10735 { \
10736 tp = & (NODE); \
10737 goto tail_recurse; \
10738 } \
10739 while (0)
10740
10741 tail_recurse:
10742 /* Skip empty subtrees. */
10743 if (!*tp)
10744 return NULL_TREE;
10745
10746 /* Don't walk the same tree twice, if the user has requested
10747 that we avoid doing so. */
10748 if (pset && pointer_set_insert (pset, *tp))
10749 return NULL_TREE;
10750
10751 /* Call the function. */
10752 walk_subtrees = 1;
10753 result = (*func) (tp, &walk_subtrees, data);
10754
10755 /* If we found something, return it. */
10756 if (result)
10757 return result;
10758
10759 code = TREE_CODE (*tp);
10760
10761 /* Even if we didn't, FUNC may have decided that there was nothing
10762 interesting below this point in the tree. */
10763 if (!walk_subtrees)
10764 {
10765 /* But we still need to check our siblings. */
10766 if (code == TREE_LIST)
10767 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10768 else if (code == OMP_CLAUSE)
10769 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10770 else
10771 return NULL_TREE;
10772 }
10773
10774 if (lh)
10775 {
10776 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10777 if (result || !walk_subtrees)
10778 return result;
10779 }
10780
10781 switch (code)
10782 {
10783 case ERROR_MARK:
10784 case IDENTIFIER_NODE:
10785 case INTEGER_CST:
10786 case REAL_CST:
10787 case FIXED_CST:
10788 case VECTOR_CST:
10789 case STRING_CST:
10790 case BLOCK:
10791 case PLACEHOLDER_EXPR:
10792 case SSA_NAME:
10793 case FIELD_DECL:
10794 case RESULT_DECL:
10795 /* None of these have subtrees other than those already walked
10796 above. */
10797 break;
10798
10799 case TREE_LIST:
10800 WALK_SUBTREE (TREE_VALUE (*tp));
10801 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10802 break;
10803
10804 case TREE_VEC:
10805 {
10806 int len = TREE_VEC_LENGTH (*tp);
10807
10808 if (len == 0)
10809 break;
10810
10811 /* Walk all elements but the first. */
10812 while (--len)
10813 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10814
10815 /* Now walk the first one as a tail call. */
10816 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10817 }
10818
10819 case COMPLEX_CST:
10820 WALK_SUBTREE (TREE_REALPART (*tp));
10821 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10822
10823 case CONSTRUCTOR:
10824 {
10825 unsigned HOST_WIDE_INT idx;
10826 constructor_elt *ce;
10827
10828 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10829 idx++)
10830 WALK_SUBTREE (ce->value);
10831 }
10832 break;
10833
10834 case SAVE_EXPR:
10835 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
10836
10837 case BIND_EXPR:
10838 {
10839 tree decl;
10840 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
10841 {
10842 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
10843 into declarations that are just mentioned, rather than
10844 declared; they don't really belong to this part of the tree.
10845 And, we can see cycles: the initializer for a declaration
10846 can refer to the declaration itself. */
10847 WALK_SUBTREE (DECL_INITIAL (decl));
10848 WALK_SUBTREE (DECL_SIZE (decl));
10849 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
10850 }
10851 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
10852 }
10853
10854 case STATEMENT_LIST:
10855 {
10856 tree_stmt_iterator i;
10857 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
10858 WALK_SUBTREE (*tsi_stmt_ptr (i));
10859 }
10860 break;
10861
10862 case OMP_CLAUSE:
10863 switch (OMP_CLAUSE_CODE (*tp))
10864 {
10865 case OMP_CLAUSE_PRIVATE:
10866 case OMP_CLAUSE_SHARED:
10867 case OMP_CLAUSE_FIRSTPRIVATE:
10868 case OMP_CLAUSE_COPYIN:
10869 case OMP_CLAUSE_COPYPRIVATE:
10870 case OMP_CLAUSE_FINAL:
10871 case OMP_CLAUSE_IF:
10872 case OMP_CLAUSE_NUM_THREADS:
10873 case OMP_CLAUSE_SCHEDULE:
10874 case OMP_CLAUSE_UNIFORM:
10875 case OMP_CLAUSE_DEPEND:
10876 case OMP_CLAUSE_NUM_TEAMS:
10877 case OMP_CLAUSE_THREAD_LIMIT:
10878 case OMP_CLAUSE_DEVICE:
10879 case OMP_CLAUSE_DIST_SCHEDULE:
10880 case OMP_CLAUSE_SAFELEN:
10881 case OMP_CLAUSE_SIMDLEN:
10882 case OMP_CLAUSE__LOOPTEMP_:
10883 case OMP_CLAUSE__SIMDUID_:
10884 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
10885 /* FALLTHRU */
10886
10887 case OMP_CLAUSE_NOWAIT:
10888 case OMP_CLAUSE_ORDERED:
10889 case OMP_CLAUSE_DEFAULT:
10890 case OMP_CLAUSE_UNTIED:
10891 case OMP_CLAUSE_MERGEABLE:
10892 case OMP_CLAUSE_PROC_BIND:
10893 case OMP_CLAUSE_INBRANCH:
10894 case OMP_CLAUSE_NOTINBRANCH:
10895 case OMP_CLAUSE_FOR:
10896 case OMP_CLAUSE_PARALLEL:
10897 case OMP_CLAUSE_SECTIONS:
10898 case OMP_CLAUSE_TASKGROUP:
10899 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10900
10901 case OMP_CLAUSE_LASTPRIVATE:
10902 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
10903 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
10904 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10905
10906 case OMP_CLAUSE_COLLAPSE:
10907 {
10908 int i;
10909 for (i = 0; i < 3; i++)
10910 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
10911 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10912 }
10913
10914 case OMP_CLAUSE_ALIGNED:
10915 case OMP_CLAUSE_LINEAR:
10916 case OMP_CLAUSE_FROM:
10917 case OMP_CLAUSE_TO:
10918 case OMP_CLAUSE_MAP:
10919 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
10920 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
10921 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10922
10923 case OMP_CLAUSE_REDUCTION:
10924 {
10925 int i;
10926 for (i = 0; i < 4; i++)
10927 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
10928 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10929 }
10930
10931 default:
10932 gcc_unreachable ();
10933 }
10934 break;
10935
10936 case TARGET_EXPR:
10937 {
10938 int i, len;
10939
10940 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
10941 But, we only want to walk once. */
10942 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
10943 for (i = 0; i < len; ++i)
10944 WALK_SUBTREE (TREE_OPERAND (*tp, i));
10945 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
10946 }
10947
10948 case DECL_EXPR:
10949 /* If this is a TYPE_DECL, walk into the fields of the type that it's
10950 defining. We only want to walk into these fields of a type in this
10951 case and not in the general case of a mere reference to the type.
10952
10953 The criterion is as follows: if the field can be an expression, it
10954 must be walked only here. This should be in keeping with the fields
10955 that are directly gimplified in gimplify_type_sizes in order for the
10956 mark/copy-if-shared/unmark machinery of the gimplifier to work with
10957 variable-sized types.
10958
10959 Note that DECLs get walked as part of processing the BIND_EXPR. */
10960 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
10961 {
10962 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
10963 if (TREE_CODE (*type_p) == ERROR_MARK)
10964 return NULL_TREE;
10965
10966 /* Call the function for the type. See if it returns anything or
10967 doesn't want us to continue. If we are to continue, walk both
10968 the normal fields and those for the declaration case. */
10969 result = (*func) (type_p, &walk_subtrees, data);
10970 if (result || !walk_subtrees)
10971 return result;
10972
10973 /* But do not walk a pointed-to type since it may itself need to
10974 be walked in the declaration case if it isn't anonymous. */
10975 if (!POINTER_TYPE_P (*type_p))
10976 {
10977 result = walk_type_fields (*type_p, func, data, pset, lh);
10978 if (result)
10979 return result;
10980 }
10981
10982 /* If this is a record type, also walk the fields. */
10983 if (RECORD_OR_UNION_TYPE_P (*type_p))
10984 {
10985 tree field;
10986
10987 for (field = TYPE_FIELDS (*type_p); field;
10988 field = DECL_CHAIN (field))
10989 {
10990 /* We'd like to look at the type of the field, but we can
10991 easily get infinite recursion. So assume it's pointed
10992 to elsewhere in the tree. Also, ignore things that
10993 aren't fields. */
10994 if (TREE_CODE (field) != FIELD_DECL)
10995 continue;
10996
10997 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
10998 WALK_SUBTREE (DECL_SIZE (field));
10999 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11000 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11001 WALK_SUBTREE (DECL_QUALIFIER (field));
11002 }
11003 }
11004
11005 /* Same for scalar types. */
11006 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11007 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11008 || TREE_CODE (*type_p) == INTEGER_TYPE
11009 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11010 || TREE_CODE (*type_p) == REAL_TYPE)
11011 {
11012 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11013 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11014 }
11015
11016 WALK_SUBTREE (TYPE_SIZE (*type_p));
11017 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11018 }
11019 /* FALLTHRU */
11020
11021 default:
11022 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11023 {
11024 int i, len;
11025
11026 /* Walk over all the sub-trees of this operand. */
11027 len = TREE_OPERAND_LENGTH (*tp);
11028
11029 /* Go through the subtrees. We need to do this in forward order so
11030 that the scope of a FOR_EXPR is handled properly. */
11031 if (len)
11032 {
11033 for (i = 0; i < len - 1; ++i)
11034 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11035 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11036 }
11037 }
11038 /* If this is a type, walk the needed fields in the type. */
11039 else if (TYPE_P (*tp))
11040 return walk_type_fields (*tp, func, data, pset, lh);
11041 break;
11042 }
11043
11044 /* We didn't find what we were looking for. */
11045 return NULL_TREE;
11046
11047 #undef WALK_SUBTREE_TAIL
11048 }
11049 #undef WALK_SUBTREE
11050
11051 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11052
11053 tree
11054 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11055 walk_tree_lh lh)
11056 {
11057 tree result;
11058 struct pointer_set_t *pset;
11059
11060 pset = pointer_set_create ();
11061 result = walk_tree_1 (tp, func, data, pset, lh);
11062 pointer_set_destroy (pset);
11063 return result;
11064 }
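
/* Illustrative sketch (not part of the original source): a walk_tree_fn
   callback that finds the first SSA_NAME in an expression.  Returning a
   non-NULL tree from the callback stops the walk and propagates that value
   back to the caller.  The helper names are hypothetical.  */
#if 0
static tree
find_ssa_name_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		 void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*tp) == SSA_NAME)
    return *tp;
  return NULL_TREE;
}

static tree
find_first_ssa_name_example (tree expr)
{
  return walk_tree_without_duplicates (&expr, find_ssa_name_r, NULL);
}
#endif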
11065
11066
11067 tree
11068 tree_block (tree t)
11069 {
11070 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11071
11072 if (IS_EXPR_CODE_CLASS (c))
11073 return LOCATION_BLOCK (t->exp.locus);
11074 gcc_unreachable ();
11075 return NULL;
11076 }
11077
11078 void
11079 tree_set_block (tree t, tree b)
11080 {
11081 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11082
11083 if (IS_EXPR_CODE_CLASS (c))
11084 {
11085 if (b)
11086 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11087 else
11088 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11089 }
11090 else
11091 gcc_unreachable ();
11092 }
11093
11094 /* Create a nameless artificial label and put it in the current
11095 function context. The label has a location of LOC. Returns the
11096 newly created label. */
11097
11098 tree
11099 create_artificial_label (location_t loc)
11100 {
11101 tree lab = build_decl (loc,
11102 LABEL_DECL, NULL_TREE, void_type_node);
11103
11104 DECL_ARTIFICIAL (lab) = 1;
11105 DECL_IGNORED_P (lab) = 1;
11106 DECL_CONTEXT (lab) = current_function_decl;
11107 return lab;
11108 }
11109
11110 /* Given a tree, try to return a useful variable name that we can use
11111 to prefix a temporary that is being assigned the value of the tree.
11112 I.e. given <temp> = &A, return A. */
11113
11114 const char *
11115 get_name (tree t)
11116 {
11117 tree stripped_decl;
11118
11119 stripped_decl = t;
11120 STRIP_NOPS (stripped_decl);
11121 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11122 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11123 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11124 {
11125 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11126 if (!name)
11127 return NULL;
11128 return IDENTIFIER_POINTER (name);
11129 }
11130 else
11131 {
11132 switch (TREE_CODE (stripped_decl))
11133 {
11134 case ADDR_EXPR:
11135 return get_name (TREE_OPERAND (stripped_decl, 0));
11136 default:
11137 return NULL;
11138 }
11139 }
11140 }
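/* A minimal usage sketch (assuming create_tmp_var from gimple-expr.c; VAL is
   any tree whose value is about to be stored in a temporary):

     const char *prefix = get_name (val);          (may be NULL)
     tree tmp = create_tmp_var (TREE_TYPE (val), prefix);  */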
11141
11142 /* Return true if FNTYPE has a variable argument list. */
11143
11144 bool
11145 stdarg_p (const_tree fntype)
11146 {
11147 function_args_iterator args_iter;
11148 tree n = NULL_TREE, t;
11149
11150 if (!fntype)
11151 return false;
11152
11153 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11154 {
11155 n = t;
11156 }
11157
11158 return n != NULL_TREE && n != void_type_node;
11159 }
11160
11161 /* Return true if FNTYPE has a prototype. */
11162
11163 bool
11164 prototype_p (tree fntype)
11165 {
11166 tree t;
11167
11168 gcc_assert (fntype != NULL_TREE);
11169
11170 t = TYPE_ARG_TYPES (fntype);
11171 return (t != NULL_TREE);
11172 }
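/* A minimal usage sketch for stdarg_p and prototype_p (FNDECL is assumed to
   be a FUNCTION_DECL, so its TREE_TYPE is the FUNCTION_TYPE these predicates
   inspect):

     tree fntype = TREE_TYPE (fndecl);
     bool has_prototype = prototype_p (fntype);
     bool is_variadic = stdarg_p (fntype);

   For "int printf (const char *, ...)" both predicates would be true.  */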
11173
11174 /* If BLOCK is inlined from an __attribute__((__artificial__))
11175 routine, return a pointer to the location from which it has been
11176 called. */
11177 location_t *
11178 block_nonartificial_location (tree block)
11179 {
11180 location_t *ret = NULL;
11181
11182 while (block && TREE_CODE (block) == BLOCK
11183 && BLOCK_ABSTRACT_ORIGIN (block))
11184 {
11185 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11186
11187 while (TREE_CODE (ao) == BLOCK
11188 && BLOCK_ABSTRACT_ORIGIN (ao)
11189 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11190 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11191
11192 if (TREE_CODE (ao) == FUNCTION_DECL)
11193 {
11194 /* If AO is an artificial inline, point RET to the
11195 call site locus at which it has been inlined and continue
11196 the loop, in case AO's caller is also an artificial
11197 inline. */
11198 if (DECL_DECLARED_INLINE_P (ao)
11199 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11200 ret = &BLOCK_SOURCE_LOCATION (block);
11201 else
11202 break;
11203 }
11204 else if (TREE_CODE (ao) != BLOCK)
11205 break;
11206
11207 block = BLOCK_SUPERCONTEXT (block);
11208 }
11209 return ret;
11210 }
11211
11212
11213 /* If EXP is inlined from an __attribute__((__artificial__))
11214 function, return the location of the original call expression. */
11215
11216 location_t
11217 tree_nonartificial_location (tree exp)
11218 {
11219 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11220
11221 if (loc)
11222 return *loc;
11223 else
11224 return EXPR_LOCATION (exp);
11225 }
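/* A minimal usage sketch (a diagnostic emitted for expression EXP;
   warning_at is from diagnostic-core.h, and 0 selects no specific -W
   option).  This points the user at the call site of an artificial inline
   wrapper instead of at the wrapper's own body:

     location_t loc = tree_nonartificial_location (exp);
     warning_at (loc, 0, "this call reaches an always-failing check");  */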
11226
11227
11228 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11229 nodes. */
11230
11231 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11232
11233 static hashval_t
11234 cl_option_hash_hash (const void *x)
11235 {
11236 const_tree const t = (const_tree) x;
11237 const char *p;
11238 size_t i;
11239 size_t len = 0;
11240 hashval_t hash = 0;
11241
11242 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11243 {
11244 p = (const char *)TREE_OPTIMIZATION (t);
11245 len = sizeof (struct cl_optimization);
11246 }
11247
11248 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11249 {
11250 p = (const char *)TREE_TARGET_OPTION (t);
11251 len = sizeof (struct cl_target_option);
11252 }
11253
11254 else
11255 gcc_unreachable ();
11256
11257 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11258 something else. */
11259 for (i = 0; i < len; i++)
11260 if (p[i])
11261 hash = (hash << 4) ^ ((i << 2) | p[i]);
11262
11263 return hash;
11264 }
11265
11266 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11267 TARGET_OPTION tree node) is the same as that given by *Y, a tree node
11268 of the same kind. */
11269
11270 static int
11271 cl_option_hash_eq (const void *x, const void *y)
11272 {
11273 const_tree const xt = (const_tree) x;
11274 const_tree const yt = (const_tree) y;
11275 const char *xp;
11276 const char *yp;
11277 size_t len;
11278
11279 if (TREE_CODE (xt) != TREE_CODE (yt))
11280 return 0;
11281
11282 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11283 {
11284 xp = (const char *)TREE_OPTIMIZATION (xt);
11285 yp = (const char *)TREE_OPTIMIZATION (yt);
11286 len = sizeof (struct cl_optimization);
11287 }
11288
11289 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11290 {
11291 xp = (const char *)TREE_TARGET_OPTION (xt);
11292 yp = (const char *)TREE_TARGET_OPTION (yt);
11293 len = sizeof (struct cl_target_option);
11294 }
11295
11296 else
11297 gcc_unreachable ();
11298
11299 return (memcmp (xp, yp, len) == 0);
11300 }
11301
11302 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11303
11304 tree
11305 build_optimization_node (struct gcc_options *opts)
11306 {
11307 tree t;
11308 void **slot;
11309
11310 /* Use the cache of optimization nodes. */
11311
11312 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11313 opts);
11314
11315 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11316 t = (tree) *slot;
11317 if (!t)
11318 {
11319 /* Insert this one into the hash table. */
11320 t = cl_optimization_node;
11321 *slot = t;
11322
11323 /* Make a new node for next time round. */
11324 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11325 }
11326
11327 return t;
11328 }
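/* A minimal usage sketch (this mirrors how attribute handlers cache a
   per-function option set; global_options is from opts.h and
   DECL_FUNCTION_SPECIFIC_OPTIMIZATION is from tree.h):

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);

   Identical option sets hash to the same node, so repeated calls with
   unchanged options return the same tree.  */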
11329
11330 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11331
11332 tree
11333 build_target_option_node (struct gcc_options *opts)
11334 {
11335 tree t;
11336 void **slot;
11337
11338 /* Use the cache of target option nodes. */
11339
11340 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11341 opts);
11342
11343 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11344 t = (tree) *slot;
11345 if (!t)
11346 {
11347 /* Insert this one into the hash table. */
11348 t = cl_target_option_node;
11349 *slot = t;
11350
11351 /* Make a new node for next time round. */
11352 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11353 }
11354
11355 return t;
11356 }
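/* An analogous sketch for target option nodes (assumption: a target
   attribute handler storing the result via DECL_FUNCTION_SPECIFIC_TARGET
   from tree.h):

     DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
       = build_target_option_node (&global_options);  */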
11357
11358 /* Determine the "ultimate origin" of a block. The block may be an inlined
11359 instance of an inlined instance of a block which is local to an inline
11360 function, so we have to trace all of the way back through the origin chain
11361 to find out what sort of node actually served as the original seed for the
11362 given block. */
11363
11364 tree
11365 block_ultimate_origin (const_tree block)
11366 {
11367 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11368
11369 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11370 nodes in the function to point to themselves; ignore that if
11371 we're trying to output the abstract instance of this function. */
11372 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11373 return NULL_TREE;
11374
11375 if (immediate_origin == NULL_TREE)
11376 return NULL_TREE;
11377 else
11378 {
11379 tree ret_val;
11380 tree lookahead = immediate_origin;
11381
11382 do
11383 {
11384 ret_val = lookahead;
11385 lookahead = (TREE_CODE (ret_val) == BLOCK
11386 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11387 }
11388 while (lookahead != NULL && lookahead != ret_val);
11389
11390 /* The block's abstract origin chain may not be the *ultimate* origin of
11391 the block. It could lead to a DECL that has an abstract origin set.
11392 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11393 will give us if it has one). Note that DECL's abstract origins are
11394 supposed to be the most distant ancestor (or so decl_ultimate_origin
11395 claims), so we don't need to loop following the DECL origins. */
11396 if (DECL_P (ret_val))
11397 return DECL_ORIGIN (ret_val);
11398
11399 return ret_val;
11400 }
11401 }
11402
11403 /* Return true iff conversion in EXP generates no instruction. Mark
11404 it inline so that we fully inline into the stripping functions even
11405 though we have two uses of this function. */
11406
11407 static inline bool
11408 tree_nop_conversion (const_tree exp)
11409 {
11410 tree outer_type, inner_type;
11411
11412 if (!CONVERT_EXPR_P (exp)
11413 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11414 return false;
11415 if (TREE_OPERAND (exp, 0) == error_mark_node)
11416 return false;
11417
11418 outer_type = TREE_TYPE (exp);
11419 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11420
11421 if (!inner_type)
11422 return false;
11423
11424 /* Use precision rather than machine mode when we can, which gives
11425 the correct answer even for submode (bit-field) types. */
11426 if ((INTEGRAL_TYPE_P (outer_type)
11427 || POINTER_TYPE_P (outer_type)
11428 || TREE_CODE (outer_type) == OFFSET_TYPE)
11429 && (INTEGRAL_TYPE_P (inner_type)
11430 || POINTER_TYPE_P (inner_type)
11431 || TREE_CODE (inner_type) == OFFSET_TYPE))
11432 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11433
11434 /* Otherwise fall back on comparing machine modes (e.g. for
11435 aggregate types, floats). */
11436 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11437 }
11438
11439 /* Return true iff conversion in EXP generates no instruction. Don't
11440 consider conversions changing the signedness. */
11441
11442 static bool
11443 tree_sign_nop_conversion (const_tree exp)
11444 {
11445 tree outer_type, inner_type;
11446
11447 if (!tree_nop_conversion (exp))
11448 return false;
11449
11450 outer_type = TREE_TYPE (exp);
11451 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11452
11453 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11454 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11455 }
11456
11457 /* Strip conversions from EXP according to tree_nop_conversion and
11458 return the resulting expression. */
11459
11460 tree
11461 tree_strip_nop_conversions (tree exp)
11462 {
11463 while (tree_nop_conversion (exp))
11464 exp = TREE_OPERAND (exp, 0);
11465 return exp;
11466 }
11467
11468 /* Strip conversions from EXP according to tree_sign_nop_conversion
11469 and return the resulting expression. */
11470
11471 tree
11472 tree_strip_sign_nop_conversions (tree exp)
11473 {
11474 while (tree_sign_nop_conversion (exp))
11475 exp = TREE_OPERAND (exp, 0);
11476 return exp;
11477 }
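/* A minimal usage sketch for the two strippers above (EXPR is any
   expression; the difference is whether sign-changing casts survive):

     tree inner = tree_strip_nop_conversions (expr);
     tree inner_same_sign = tree_strip_sign_nop_conversions (expr);

   For (unsigned int) (int) x with x of type unsigned int, the first yields
   x, while the second leaves the expression untouched because the outermost
   cast changes signedness.  */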
11478
11479 /* Avoid any floating point extensions from EXP. */
11480 tree
11481 strip_float_extensions (tree exp)
11482 {
11483 tree sub, expt, subt;
11484
11485 /* For a floating point constant, look up the narrowest type that can hold
11486 it properly and handle it like (type)(narrowest_type)constant.
11487 This way we can optimize for instance a=a*2.0 where "a" is float
11488 but 2.0 is a double constant. */
11489 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11490 {
11491 REAL_VALUE_TYPE orig;
11492 tree type = NULL;
11493
11494 orig = TREE_REAL_CST (exp);
11495 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11496 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11497 type = float_type_node;
11498 else if (TYPE_PRECISION (TREE_TYPE (exp))
11499 > TYPE_PRECISION (double_type_node)
11500 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11501 type = double_type_node;
11502 if (type)
11503 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11504 }
11505
11506 if (!CONVERT_EXPR_P (exp))
11507 return exp;
11508
11509 sub = TREE_OPERAND (exp, 0);
11510 subt = TREE_TYPE (sub);
11511 expt = TREE_TYPE (exp);
11512
11513 if (!FLOAT_TYPE_P (subt))
11514 return exp;
11515
11516 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11517 return exp;
11518
11519 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11520 return exp;
11521
11522 return strip_float_extensions (sub);
11523 }
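/* A minimal usage sketch (a folding routine checking whether a double
   computation could really be carried out in float; ARG0 and ARG1 are
   assumed operands of the same double-typed expression):

     tree n0 = strip_float_extensions (arg0);
     tree n1 = strip_float_extensions (arg1);
     if (TYPE_PRECISION (TREE_TYPE (n0)) <= TYPE_PRECISION (float_type_node)
	 && TYPE_PRECISION (TREE_TYPE (n1)) <= TYPE_PRECISION (float_type_node))
       {
	 (both operands are really floats, possibly via the REAL_CST
	  narrowing above, so the computation fits in float)
       }  */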
11524
11525 /* Strip out all handled components that produce invariant
11526 offsets. */
11527
11528 const_tree
11529 strip_invariant_refs (const_tree op)
11530 {
11531 while (handled_component_p (op))
11532 {
11533 switch (TREE_CODE (op))
11534 {
11535 case ARRAY_REF:
11536 case ARRAY_RANGE_REF:
11537 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11538 || TREE_OPERAND (op, 2) != NULL_TREE
11539 || TREE_OPERAND (op, 3) != NULL_TREE)
11540 return NULL;
11541 break;
11542
11543 case COMPONENT_REF:
11544 if (TREE_OPERAND (op, 2) != NULL_TREE)
11545 return NULL;
11546 break;
11547
11548 default:;
11549 }
11550 op = TREE_OPERAND (op, 0);
11551 }
11552
11553 return op;
11554 }
11555
11556 static GTY(()) tree gcc_eh_personality_decl;
11557
11558 /* Return the GCC personality function decl. */
11559
11560 tree
11561 lhd_gcc_personality (void)
11562 {
11563 if (!gcc_eh_personality_decl)
11564 gcc_eh_personality_decl = build_personality_function ("gcc");
11565 return gcc_eh_personality_decl;
11566 }
11567
11568 /* For languages with One Definition Rule, work out if
11569 trees are actually the same even if the tree representation
11570 differs. This handles only decls appearing in TYPE_NAME
11571 and TYPE_CONTEXT. That is, NAMESPACE_DECL, TYPE_DECL,
11572 RECORD_TYPE and IDENTIFIER_NODE. */
11573
11574 static bool
11575 same_for_odr (tree t1, tree t2)
11576 {
11577 if (t1 == t2)
11578 return true;
11579 if (!t1 || !t2)
11580 return false;
11581 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11582 if (TREE_CODE (t1) == IDENTIFIER_NODE
11583 && TREE_CODE (t2) == TYPE_DECL
11584 && DECL_FILE_SCOPE_P (t1))
11585 {
11586 t2 = DECL_NAME (t2);
11587 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11588 }
11589 if (TREE_CODE (t2) == IDENTIFIER_NODE
11590 && TREE_CODE (t1) == TYPE_DECL
11591 && DECL_FILE_SCOPE_P (t2))
11592 {
11593 t1 = DECL_NAME (t1);
11594 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11595 }
11596 if (TREE_CODE (t1) != TREE_CODE (t2))
11597 return false;
11598 if (TYPE_P (t1))
11599 return types_same_for_odr (t1, t2);
11600 if (DECL_P (t1))
11601 return decls_same_for_odr (t1, t2);
11602 return false;
11603 }
11604
11605 /* For languages with One Definition Rule, work out if
11606 decls are actually the same even if the tree representation
11607 differs. This handles only decls appearing in TYPE_NAME
11608 and TYPE_CONTEXT. That is, NAMESPACE_DECL, TYPE_DECL,
11609 RECORD_TYPE and IDENTIFIER_NODE. */
11610
11611 static bool
11612 decls_same_for_odr (tree decl1, tree decl2)
11613 {
11614 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11615 && DECL_ORIGINAL_TYPE (decl1))
11616 decl1 = DECL_ORIGINAL_TYPE (decl1);
11617 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11618 && DECL_ORIGINAL_TYPE (decl2))
11619 decl2 = DECL_ORIGINAL_TYPE (decl2);
11620 if (decl1 == decl2)
11621 return true;
11622 if (!decl1 || !decl2)
11623 return false;
11624 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11625 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11626 return false;
11627 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11628 return true;
11629 if (TREE_CODE (decl1) != NAMESPACE_DECL
11630 && TREE_CODE (decl1) != TYPE_DECL)
11631 return false;
11632 if (!DECL_NAME (decl1))
11633 return false;
11634 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11635 gcc_checking_assert (!DECL_NAME (decl2)
11636 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11637 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11638 return false;
11639 return same_for_odr (DECL_CONTEXT (decl1),
11640 DECL_CONTEXT (decl2));
11641 }
11642
11643 /* For languages with One Definition Rule, work out if
11644 types are the same even if the tree representation differs.
11645 This is non-trivial for LTO, where minor differences in
11646 the type representation may have prevented type merging
11647 from merging two copies of an otherwise equivalent type. */
11648
11649 bool
11650 types_same_for_odr (tree type1, tree type2)
11651 {
11652 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11653 type1 = TYPE_MAIN_VARIANT (type1);
11654 type2 = TYPE_MAIN_VARIANT (type2);
11655 if (type1 == type2)
11656 return true;
11657
11658 #ifndef ENABLE_CHECKING
11659 if (!in_lto_p)
11660 return false;
11661 #endif
11662
11663 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11664 on the corresponding TYPE_STUB_DECL. */
11665 if (type_in_anonymous_namespace_p (type1)
11666 || type_in_anonymous_namespace_p (type2))
11667 return false;
11668 /* When assembler name of virtual table is available, it is
11669 easy to compare types for equivalence. */
11670 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11671 && BINFO_VTABLE (TYPE_BINFO (type1))
11672 && BINFO_VTABLE (TYPE_BINFO (type2)))
11673 {
11674 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11675 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11676
11677 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11678 {
11679 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11680 || !operand_equal_p (TREE_OPERAND (v1, 1),
11681 TREE_OPERAND (v2, 1), 0))
11682 return false;
11683 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11684 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11685 }
11686 v1 = DECL_ASSEMBLER_NAME (v1);
11687 v2 = DECL_ASSEMBLER_NAME (v2);
11688 return (v1 == v2);
11689 }
11690
11691 /* FIXME: the code comparing type names considers all instantiations of the
11692 same template to have the same name. This is because we have no access
11693 to template parameters. For types with no virtual method tables
11694 we can thus return false positives. At the moment we do not need
11695 to compare types in other scenarios than devirtualization. */
11696
11697 /* If the types are not structurally the same, do not bother to continue.
11698 A match in the remainder of the code would mean an ODR violation. */
11699 if (!types_compatible_p (type1, type2))
11700 return false;
11701 if (!TYPE_NAME (type1))
11702 return false;
11703 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
11704 return false;
11705 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
11706 return false;
11707 /* When not in LTO, the MAIN_VARIANT check above should already have matched. */
11708 gcc_assert (in_lto_p);
11709
11710 return true;
11711 }
11712
11713 /* TARGET is the call target of a GIMPLE call statement
11714 (obtained by gimple_call_fn). Return true if it is an
11715 OBJ_TYPE_REF representing a virtual call to a C++ method.
11716 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11717 through a cast where middle-end devirtualization machinery
11718 can't apply.) */
11719
11720 bool
11721 virtual_method_call_p (tree target)
11722 {
11723 if (TREE_CODE (target) != OBJ_TYPE_REF)
11724 return false;
11725 target = TREE_TYPE (target);
11726 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11727 target = TREE_TYPE (target);
11728 if (TREE_CODE (target) == FUNCTION_TYPE)
11729 return false;
11730 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11731 return true;
11732 }
11733
11734 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11735
11736 tree
11737 obj_type_ref_class (tree ref)
11738 {
11739 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11740 ref = TREE_TYPE (ref);
11741 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11742 ref = TREE_TYPE (ref);
11743 /* We look for the type THIS points to. ObjC also builds
11744 OBJ_TYPE_REF with non-method calls; their first parameter
11745 ID however also corresponds to the class type. */
11746 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11747 || TREE_CODE (ref) == FUNCTION_TYPE);
11748 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11749 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11750 return TREE_TYPE (ref);
11751 }
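/* A minimal usage sketch tying virtual_method_call_p and obj_type_ref_class
   together (STMT is assumed to be a GIMPLE call statement; gimple_call_fn is
   from gimple.h):

     tree fn = gimple_call_fn (stmt);
     if (fn && virtual_method_call_p (fn))
       {
	 tree klass = obj_type_ref_class (fn);
	 (klass is the RECORD_TYPE whose vtable the call goes through)
       }  */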
11752
11753 /* Return true if T is in anonymous namespace. */
11754
11755 bool
11756 type_in_anonymous_namespace_p (tree t)
11757 {
11758 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11759 }
11760
11761 /* Try to find a base info of BINFO that would have its field decl at offset
11762 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11763 found, return it, otherwise return NULL_TREE. */
11764
11765 tree
11766 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11767 {
11768 tree type = BINFO_TYPE (binfo);
11769
11770 while (true)
11771 {
11772 HOST_WIDE_INT pos, size;
11773 tree fld;
11774 int i;
11775
11776 if (types_same_for_odr (type, expected_type))
11777 return binfo;
11778 if (offset < 0)
11779 return NULL_TREE;
11780
11781 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11782 {
11783 if (TREE_CODE (fld) != FIELD_DECL)
11784 continue;
11785
11786 pos = int_bit_position (fld);
11787 size = tree_to_uhwi (DECL_SIZE (fld));
11788 if (pos <= offset && (pos + size) > offset)
11789 break;
11790 }
11791 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11792 return NULL_TREE;
11793
11794 if (!DECL_ARTIFICIAL (fld))
11795 {
11796 binfo = TYPE_BINFO (TREE_TYPE (fld));
11797 if (!binfo)
11798 return NULL_TREE;
11799 }
11800 /* Offset 0 indicates the primary base, whose vtable contents are
11801 represented in the binfo for the derived class. */
11802 else if (offset != 0)
11803 {
11804 tree base_binfo, found_binfo = NULL_TREE;
11805 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11806 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11807 {
11808 found_binfo = base_binfo;
11809 break;
11810 }
11811 if (!found_binfo)
11812 return NULL_TREE;
11813 binfo = found_binfo;
11814 }
11815
11816 type = TREE_TYPE (fld);
11817 offset -= pos;
11818 }
11819 }
11820
11821 /* Returns true if X is a typedef decl. */
11822
11823 bool
11824 is_typedef_decl (tree x)
11825 {
11826 return (x && TREE_CODE (x) == TYPE_DECL
11827 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11828 }
11829
11830 /* Returns true iff TYPE is a type variant created for a typedef. */
11831
11832 bool
11833 typedef_variant_p (tree type)
11834 {
11835 return is_typedef_decl (TYPE_NAME (type));
11836 }
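/* A minimal usage sketch (peeling a typedef variant back to the type it
   names; DECL_ORIGINAL_TYPE of the typedef's TYPE_DECL holds that type):

     if (typedef_variant_p (type))
       type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));  */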
11837
11838 /* Warn about a use of an identifier which was marked deprecated. */
11839 void
11840 warn_deprecated_use (tree node, tree attr)
11841 {
11842 const char *msg;
11843
11844 if (node == 0 || !warn_deprecated_decl)
11845 return;
11846
11847 if (!attr)
11848 {
11849 if (DECL_P (node))
11850 attr = DECL_ATTRIBUTES (node);
11851 else if (TYPE_P (node))
11852 {
11853 tree decl = TYPE_STUB_DECL (node);
11854 if (decl)
11855 attr = lookup_attribute ("deprecated",
11856 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11857 }
11858 }
11859
11860 if (attr)
11861 attr = lookup_attribute ("deprecated", attr);
11862
11863 if (attr)
11864 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11865 else
11866 msg = NULL;
11867
11868 if (DECL_P (node))
11869 {
11870 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11871 if (msg)
11872 warning (OPT_Wdeprecated_declarations,
11873 "%qD is deprecated (declared at %r%s:%d%R): %s",
11874 node, "locus", xloc.file, xloc.line, msg);
11875 else
11876 warning (OPT_Wdeprecated_declarations,
11877 "%qD is deprecated (declared at %r%s:%d%R)",
11878 node, "locus", xloc.file, xloc.line);
11879 }
11880 else if (TYPE_P (node))
11881 {
11882 tree what = NULL_TREE;
11883 tree decl = TYPE_STUB_DECL (node);
11884
11885 if (TYPE_NAME (node))
11886 {
11887 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11888 what = TYPE_NAME (node);
11889 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11890 && DECL_NAME (TYPE_NAME (node)))
11891 what = DECL_NAME (TYPE_NAME (node));
11892 }
11893
11894 if (decl)
11895 {
11896 expanded_location xloc
11897 = expand_location (DECL_SOURCE_LOCATION (decl));
11898 if (what)
11899 {
11900 if (msg)
11901 warning (OPT_Wdeprecated_declarations,
11902 "%qE is deprecated (declared at %r%s:%d%R): %s",
11903 what, "locus", xloc.file, xloc.line, msg);
11904 else
11905 warning (OPT_Wdeprecated_declarations,
11906 "%qE is deprecated (declared at %r%s:%d%R)",
11907 what, "locus", xloc.file, xloc.line);
11908 }
11909 else
11910 {
11911 if (msg)
11912 warning (OPT_Wdeprecated_declarations,
11913 "type is deprecated (declared at %r%s:%d%R): %s",
11914 "locus", xloc.file, xloc.line, msg);
11915 else
11916 warning (OPT_Wdeprecated_declarations,
11917 "type is deprecated (declared at %r%s:%d%R)",
11918 "locus", xloc.file, xloc.line);
11919 }
11920 }
11921 else
11922 {
11923 if (what)
11924 {
11925 if (msg)
11926 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
11927 what, msg);
11928 else
11929 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
11930 }
11931 else
11932 {
11933 if (msg)
11934 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
11935 msg);
11936 else
11937 warning (OPT_Wdeprecated_declarations, "type is deprecated");
11938 }
11939 }
11940 }
11941 }
11942
11943 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
11944 somewhere in it. */
11945
11946 bool
11947 contains_bitfld_component_ref_p (const_tree ref)
11948 {
11949 while (handled_component_p (ref))
11950 {
11951 if (TREE_CODE (ref) == COMPONENT_REF
11952 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
11953 return true;
11954 ref = TREE_OPERAND (ref, 0);
11955 }
11956
11957 return false;
11958 }
11959
11960 /* Try to determine whether a TRY_CATCH expression can fall through.
11961 This is a subroutine of block_may_fallthru. */
11962
11963 static bool
11964 try_catch_may_fallthru (const_tree stmt)
11965 {
11966 tree_stmt_iterator i;
11967
11968 /* If the TRY block can fall through, the whole TRY_CATCH can
11969 fall through. */
11970 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
11971 return true;
11972
11973 i = tsi_start (TREE_OPERAND (stmt, 1));
11974 switch (TREE_CODE (tsi_stmt (i)))
11975 {
11976 case CATCH_EXPR:
11977 /* We expect to see a sequence of CATCH_EXPR trees, each with a
11978 catch expression and a body. The whole TRY_CATCH may fall
11979 through iff any of the catch bodies falls through. */
11980 for (; !tsi_end_p (i); tsi_next (&i))
11981 {
11982 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
11983 return true;
11984 }
11985 return false;
11986
11987 case EH_FILTER_EXPR:
11988 /* The exception filter expression only matters if there is an
11989 exception. If the exception does not match EH_FILTER_TYPES,
11990 we will execute EH_FILTER_FAILURE, and we will fall through
11991 if that falls through. If the exception does match
11992 EH_FILTER_TYPES, the stack unwinder will continue up the
11993 stack, so we will not fall through. We don't know whether we
11994 will throw an exception which matches EH_FILTER_TYPES or not,
11995 so we just ignore EH_FILTER_TYPES and assume that we might
11996 throw an exception which doesn't match. */
11997 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
11998
11999 default:
12000 /* This case represents statements to be executed when an
12001 exception occurs. Those statements are implicitly followed
12002 by a RESX statement to resume execution after the exception.
12003 So in this case the TRY_CATCH never falls through. */
12004 return false;
12005 }
12006 }
12007
12008 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12009 need not be 100% accurate; simply be conservative and return true if we
12010 don't know. This is used only to avoid stupidly generating extra code.
12011 If we're wrong, we'll just delete the extra code later. */
12012
12013 bool
12014 block_may_fallthru (const_tree block)
12015 {
12016 /* This CONST_CAST is okay because expr_last returns its argument
12017 unmodified and we assign it to a const_tree. */
12018 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12019
12020 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12021 {
12022 case GOTO_EXPR:
12023 case RETURN_EXPR:
12024 /* Easy cases. If the last statement of the block implies
12025 control transfer, then we can't fall through. */
12026 return false;
12027
12028 case SWITCH_EXPR:
12029 /* If SWITCH_LABELS is set, this is lowered, and represents a
12030 branch to a selected label and hence cannot fall through.
12031 Otherwise SWITCH_BODY is set, and the switch can fall
12032 through. */
12033 return SWITCH_LABELS (stmt) == NULL_TREE;
12034
12035 case COND_EXPR:
12036 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12037 return true;
12038 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12039
12040 case BIND_EXPR:
12041 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12042
12043 case TRY_CATCH_EXPR:
12044 return try_catch_may_fallthru (stmt);
12045
12046 case TRY_FINALLY_EXPR:
12047 /* The finally clause is always executed after the try clause,
12048 so if it does not fall through, then the try-finally will not
12049 fall through. Otherwise, if the try clause does not fall
12050 through, then when the finally clause falls through it will
12051 resume execution wherever the try clause was going. So the
12052 whole try-finally will only fall through if both the try
12053 clause and the finally clause fall through. */
12054 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12055 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12056
12057 case MODIFY_EXPR:
12058 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12059 stmt = TREE_OPERAND (stmt, 1);
12060 else
12061 return true;
12062 /* FALLTHRU */
12063
12064 case CALL_EXPR:
12065 /* Functions that do not return do not fall through. */
12066 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12067
12068 case CLEANUP_POINT_EXPR:
12069 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12070
12071 case TARGET_EXPR:
12072 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12073
12074 case ERROR_MARK:
12075 return true;
12076
12077 default:
12078 return lang_hooks.block_may_fallthru (stmt);
12079 }
12080 }
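/* A minimal usage sketch (a front end deciding whether a function body needs
   an implicit return appended; DECL_SAVED_TREE is the body of FNDECL):

     if (block_may_fallthru (DECL_SAVED_TREE (fndecl)))
       {
	 (control can drop off the end, so append an implicit "return;"
	  or diagnose a missing return value)
       }  */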
12081
12082 /* True if we are using EH to handle cleanups. */
12083 static bool using_eh_for_cleanups_flag = false;
12084
12085 /* This routine is called from front ends to indicate that EH should be used for
12086 cleanups. */
12087 void
12088 using_eh_for_cleanups (void)
12089 {
12090 using_eh_for_cleanups_flag = true;
12091 }
12092
12093 /* Query whether EH is used for cleanups. */
12094 bool
12095 using_eh_for_cleanups_p (void)
12096 {
12097 return using_eh_for_cleanups_flag;
12098 }
12099
12100 /* Wrapper for tree_code_name to ensure that tree code is valid. */
12101 const char *
12102 get_tree_code_name (enum tree_code code)
12103 {
12104 const char *invalid = "<invalid tree code>";
12105
12106 if (code >= MAX_TREE_CODES)
12107 return invalid;
12108
12109 return tree_code_name[code];
12110 }
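/* A minimal usage sketch (debug output for an unexpected node T; using this
   wrapper rather than indexing tree_code_name directly keeps the print safe
   even for an out-of-range code):

     fprintf (stderr, "unexpected tree code %s\n",
	      get_tree_code_name (TREE_CODE (t)));  */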
12111
12112 /* Drops the TREE_OVERFLOW flag from T. */
12113
12114 tree
12115 drop_tree_overflow (tree t)
12116 {
12117 gcc_checking_assert (TREE_OVERFLOW (t));
12118
12119 /* For tree codes with a sharing machinery re-build the result. */
12120 if (TREE_CODE (t) == INTEGER_CST)
12121 return wide_int_to_tree (TREE_TYPE (t), t);
12122
12123 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12124 and drop the flag. */
12125 t = copy_node (t);
12126 TREE_OVERFLOW (t) = 0;
12127 return t;
12128 }
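/* A minimal usage sketch (CST is a constant whose TREE_OVERFLOW bit is set
   but where the overflow is known to be harmless for the caller):

     if (TREE_OVERFLOW (cst))
       cst = drop_tree_overflow (cst);  */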
12129
12130 #include "gt-tree.h"