]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree.cc
Update copyright years.
[thirdparty/gcc.git] / gcc / tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24    tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28    call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
76
77
78
/* Names of tree components.
   Used for printing out the tree and error messages.  */
/* Expand each DEFTREECODE entry from all-tree.def to its printable name,
   yielding one string per tree code.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
/* Placeholder emitted at the boundary between the base tree codes and any
   language-specific codes; never printed for a real node.  */
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
90
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries, in the same
   order (the table is indexed by a tree_code_class value).  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
108
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

/* Number of nodes allocated, indexed by individual tree code.  */
static uint64_t tree_code_counts[MAX_TREE_CODES];
/* Number of nodes and total bytes allocated, indexed by node kind.  */
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];

/* Human-readable labels for the statistics above.
   Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
145
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Cached hash value for TYPE.  */
  tree type;		/* The recorded type itself.  */
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Hasher for the type table; hashing just returns the value cached in
   the type_hash entry.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep a cache entry only while its type is otherwise reachable
     (marked) during garbage collection.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
199
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;

/* General tree->tree mapping  structure for use in hash tables.  */

/* Map from a decl to its DECL_DEBUG_EXPR / DECL_VALUE_EXPR / debug args;
   all three are GC caches whose entries vanish when the key dies.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

/* Forward declarations for helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

/* Shared well-known tree nodes and the standard integer types.  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

/* Per-entry enablement flag and type nodes for the __intN types
   (NUM_INT_N_ENTS entries; see tree.h).  */
bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[CODE][TS] is nonzero when nodes with tree code
   CODE contain tree structure TS; filled in by
   initialize_tree_contains_struct below.  */
bool tree_contains_struct[MAX_TREE_CODES][64];
240
/* Number of operands for each OMP clause.  Indexed by OMP_CLAUSE_CODE;
   the order must line up with omp_clause_code_name below (and with the
   omp_clause_code enum).  Used, e.g., by tree_size to compute the size
   of a variable-length OMP_CLAUSE node.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_ENTER  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_HAS_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_DOACROSS  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  2, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
332
/* Printable name for each OMP clause, indexed by OMP_CLAUSE_CODE.
   The order must line up with omp_clause_num_ops above (and with the
   omp_clause_code enum).  These are the OpenMP-centric internal names;
   see user_omp_clause_code_name below for OpenACC diagnostics.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "affinity",
  "aligned",
  "allocate",
  "depend",
  "nontemporal",
  "uniform",
  "enter",
  "link",
  "detach",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "from",
  "to",
  "map",
  "has_device_addr",
  "doacross",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "filter",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "if_present",
  "finalize",
  "nohost",
};
423
424 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
425 clause names, but for use in diagnostics etc. would like to use the "user"
426 clause names. */
427
428 const char *
429 user_omp_clause_code_name (tree clause, bool oacc)
430 {
431 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
432 distinguish clauses as seen by the user. See also where front ends do
433 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
434 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
435 switch (OMP_CLAUSE_MAP_KIND (clause))
436 {
437 case GOMP_MAP_FORCE_ALLOC:
438 case GOMP_MAP_ALLOC: return "create";
439 case GOMP_MAP_FORCE_TO:
440 case GOMP_MAP_TO: return "copyin";
441 case GOMP_MAP_FORCE_FROM:
442 case GOMP_MAP_FROM: return "copyout";
443 case GOMP_MAP_FORCE_TOFROM:
444 case GOMP_MAP_TOFROM: return "copy";
445 case GOMP_MAP_RELEASE: return "delete";
446 case GOMP_MAP_FORCE_PRESENT: return "present";
447 case GOMP_MAP_ATTACH: return "attach";
448 case GOMP_MAP_FORCE_DETACH:
449 case GOMP_MAP_DETACH: return "detach";
450 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
451 case GOMP_MAP_LINK: return "link";
452 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
453 default: break;
454 }
455
456 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
457 }
458
459
/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      /* Each declaration code has its own structure; anything not
	 listed uses the generic non-common layout.  */
      switch (code)
	{
	case CONST_DECL:	return TS_CONST_DECL;
	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
	case FIELD_DECL:	return TS_FIELD_DECL;
	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
	case LABEL_DECL:	return TS_LABEL_DECL;
	case PARM_DECL:		return TS_PARM_DECL;
	case RESULT_DECL:	return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL:		return TS_TYPE_DECL;
	case VAR_DECL:		return TS_VAR_DECL;
	default:		return TS_DECL_NON_COMMON;
	}

    case tcc_type:		return TS_TYPE_NON_COMMON;

    /* All expression-like classes share the tree_exp layout.  */
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp:		return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST:		return TS_COMPLEX;
    case FIXED_CST:		return TS_FIXED_CST;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case STRING_CST:		return TS_STRING;
    case VECTOR_CST:		return TS_VECTOR;
    case VOID_CST:		return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case SSA_NAME:		return TS_SSA_NAME;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    case TREE_BINFO:		return TS_BINFO;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
528
529
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  For every tree code, mark the TS structure it uses and, via
   the MARK_TS_* macros, every structure that one is derived from.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each case
	 marks the immediate base; the MARK_TS_* macros are assumed to
	 chain up so the whole ancestry gets marked.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
676
677
/* Init tree.cc.  Allocates the GC-managed hash tables and scratch nodes
   declared above, then fills in tree_contains_struct and gives the
   front end a chance to register its own tree structures.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  /* Decl -> expression side tables (declared above).  */
  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  /* Sharing tables for INTEGER_CST and POLY_INT_CST nodes.  */
  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Temporary node for integer const lookups; see the comment at
     int_cst_node above.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
708
709 \f
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
713 tree
714 decl_assembler_name (tree decl)
715 {
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
717 lang_hooks.set_decl_assembler_name (decl);
718 return DECL_ASSEMBLER_NAME_RAW (decl);
719 }
720
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
723 name. */
724
725 void
726 overwrite_decl_assembler_name (tree decl, tree name)
727 {
728 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
729 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 }
731
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.cc:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* TYPE_DECLs qualify only when they are the main-variant's own
	 name for a non-artificial type with linkage (or an integer
	 type), per the ODR-merging rules described above.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
810
811 /* If T needs an assembler name, have one created for it. */
812
813 void
814 assign_assembler_name_if_needed (tree t)
815 {
816 if (need_assembler_name_p (t))
817 {
818 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
819 diagnostics that use input_location to show locus
820 information. The problem here is that, at this point,
821 input_location is generally anchored to the end of the file
822 (since the parser is long gone), so we don't have a good
823 position to pin it to.
824
825 To alleviate this problem, this uses the location of T's
826 declaration. Examples of this are
827 testsuite/g++.dg/template/cond2.C and
828 testsuite/g++.dg/template/pr35240.C. */
829 location_t saved_location = input_location;
830 input_location = DECL_SOURCE_LOCATION (t);
831
832 decl_assembler_name (t);
833
834 input_location = saved_location;
835 }
836 }
837
838 /* When the target supports COMDAT groups, this indicates which group the
839 DECL is associated with. This can be either an IDENTIFIER_NODE or a
840 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
841 tree
842 decl_comdat_group (const_tree node)
843 {
844 struct symtab_node *snode = symtab_node::get (node);
845 if (!snode)
846 return NULL;
847 return snode->get_comdat_group ();
848 }
849
850 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
851 tree
852 decl_comdat_group_id (const_tree node)
853 {
854 struct symtab_node *snode = symtab_node::get (node);
855 if (!snode)
856 return NULL;
857 return snode->get_comdat_group_id ();
858 }
859
/* When the target supports named sections, return the name of the section
   NODE is placed in as a C string, or NULL if NODE is in no section or
   has no symbol table node.  (Despite the historic wording of this
   comment, the value is a plain string, not an IDENTIFIER_NODE.)  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}
870
871 /* Set section name of NODE to VALUE (that is expected to be
872 identifier node) */
873 void
874 set_decl_section_name (tree node, const char *value)
875 {
876 struct symtab_node *snode;
877
878 if (value == NULL)
879 {
880 snode = symtab_node::get (node);
881 if (!snode)
882 return;
883 }
884 else if (VAR_P (node))
885 snode = varpool_node::get_create (node);
886 else
887 snode = cgraph_node::get_create (node);
888 snode->set_section (value);
889 }
890
891 /* Set section name of NODE to match the section name of OTHER.
892
893 set_decl_section_name (decl, other) is equivalent to
894 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
895 efficient. */
896 void
897 set_decl_section_name (tree decl, const_tree other)
898 {
899 struct symtab_node *other_node = symtab_node::get (other);
900 if (other_node)
901 {
902 struct symtab_node *decl_node;
903 if (VAR_P (decl))
904 decl_node = varpool_node::get_create (decl);
905 else
906 decl_node = cgraph_node::get_create (decl);
907 decl_node->set_section (*other_node);
908 }
909 else
910 {
911 struct symtab_node *decl_node = symtab_node::get (decl);
912 if (!decl_node)
913 return;
914 decl_node->set_section (NULL);
915 }
916 }
917
918 /* Return TLS model of a variable NODE. */
919 enum tls_model
920 decl_tls_model (const_tree node)
921 {
922 struct varpool_node *snode = varpool_node::get (node);
923 if (!snode)
924 return TLS_MODEL_NONE;
925 return snode->tls_model;
926 }
927
928 /* Set TLS model of variable NODE to MODEL. */
929 void
930 set_decl_tls_model (tree node, enum tls_model model)
931 {
932 struct varpool_node *vnode;
933
934 if (model == TLS_MODEL_NONE)
935 {
936 vnode = varpool_node::get (node);
937 if (!vnode)
938 return;
939 }
940 else
941 vnode = varpool_node::get_create (node);
942 vnode->tls_model = model;
943 }
944
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR; those
   are rejected with gcc_unreachable, and callers must use tree_size
   on a concrete node instead.  Codes beyond NUM_TREE_CODES are
   delegated to the language hook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case OPAQUE_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    /* Expression nodes: the base tree_exp already contains one operand,
       hence the "- 1" in the operand-count term below.  */
    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
1056
1057 /* Compute the number of bytes occupied by NODE. This routine only
1058 looks at TREE_CODE, except for those nodes that have variable sizes. */
1059 size_t
1060 tree_size (const_tree node)
1061 {
1062 const enum tree_code code = TREE_CODE (node);
1063 switch (code)
1064 {
1065 case INTEGER_CST:
1066 return (sizeof (struct tree_int_cst)
1067 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1068
1069 case TREE_BINFO:
1070 return (offsetof (struct tree_binfo, base_binfos)
1071 + vec<tree, va_gc>
1072 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1073
1074 case TREE_VEC:
1075 return (sizeof (struct tree_vec)
1076 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1077
1078 case VECTOR_CST:
1079 return (sizeof (struct tree_vector)
1080 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1081
1082 case STRING_CST:
1083 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1084
1085 case OMP_CLAUSE:
1086 return (sizeof (struct tree_omp_clause)
1087 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1088 * sizeof (tree));
1089
1090 default:
1091 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1092 return (sizeof (struct tree_exp)
1093 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1094 else
1095 return tree_code_size (code);
1096 }
1097 }
1098
1099 /* Return tree node kind based on tree CODE. */
1100
1101 static tree_node_kind
1102 get_stats_node_kind (enum tree_code code)
1103 {
1104 enum tree_code_class type = TREE_CODE_CLASS (code);
1105
1106 switch (type)
1107 {
1108 case tcc_declaration: /* A decl node */
1109 return d_kind;
1110 case tcc_type: /* a type node */
1111 return t_kind;
1112 case tcc_statement: /* an expression with side effects */
1113 return s_kind;
1114 case tcc_reference: /* a reference */
1115 return r_kind;
1116 case tcc_expression: /* an expression */
1117 case tcc_comparison: /* a comparison expression */
1118 case tcc_unary: /* a unary arithmetic expression */
1119 case tcc_binary: /* a binary arithmetic expression */
1120 return e_kind;
1121 case tcc_constant: /* a constant */
1122 return c_kind;
1123 case tcc_exceptional: /* something random, like an identifier. */
1124 switch (code)
1125 {
1126 case IDENTIFIER_NODE:
1127 return id_kind;
1128 case TREE_VEC:
1129 return vec_kind;
1130 case TREE_BINFO:
1131 return binfo_kind;
1132 case SSA_NAME:
1133 return ssa_name_kind;
1134 case BLOCK:
1135 return b_kind;
1136 case CONSTRUCTOR:
1137 return constr_kind;
1138 case OMP_CLAUSE:
1139 return omp_clause_kind;
1140 default:
1141 return x_kind;
1142 }
1143 break;
1144 case tcc_vl_exp:
1145 return e_kind;
1146 default:
1147 gcc_unreachable ();
1148 }
1149 }
1150
1151 /* Record interesting allocation statistics for a tree node with CODE
1152 and LENGTH. */
1153
1154 static void
1155 record_node_allocation_statistics (enum tree_code code, size_t length)
1156 {
1157 if (!GATHER_STATISTICS)
1158 return;
1159
1160 tree_node_kind kind = get_stats_node_kind (code);
1161
1162 tree_code_counts[(int) code]++;
1163 tree_node_counts[(int) kind]++;
1164 tree_node_sizes[(int) kind] += length;
1165 }
1166
1167 /* Allocate and return a new UID from the DECL_UID namespace. */
1168
1169 int
1170 allocate_decl_uid (void)
1171 {
1172 return next_decl_uid++;
1173 }
1174
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* The allocation is cleared, so only non-zero fields need explicit
     initialization in the per-class cases below.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements have side effects, except for debug markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* DEBUG_EXPR_DECLs draw UIDs from a separate counter that grows
	 downward, so they do not perturb regular DECL_UID numbering.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      /* A fresh type is its own main variant and canonical type until
	 someone says otherwise.  */
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  /* Option nodes own a separately allocated option struct.  */
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1290
1291 /* Free tree node. */
1292
1293 void
1294 free_node (tree node)
1295 {
1296 enum tree_code code = TREE_CODE (node);
1297 if (GATHER_STATISTICS)
1298 {
1299 enum tree_node_kind kind = get_stats_node_kind (code);
1300
1301 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1302 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1303 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1304
1305 tree_code_counts[(int) TREE_CODE (node)]--;
1306 tree_node_counts[(int) kind]--;
1307 tree_node_sizes[(int) kind] -= tree_size (node);
1308 }
1309 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1310 vec_free (CONSTRUCTOR_ELTS (node));
1311 else if (code == BLOCK)
1312 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1313 else if (code == TREE_BINFO)
1314 vec_free (BINFO_BASE_ACCESSES (node));
1315 else if (code == OPTIMIZATION_NODE)
1316 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1317 else if (code == TARGET_OPTION_NODE)
1318 cl_target_option_free (TREE_TARGET_OPTION (node));
1319 ggc_free (node);
1320 }
1321 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  /* The copy starts life unchained, unwritten and unvisited.  */
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Give the copy its own UID.  DEBUG_EXPR_DECLs use the separate
	 downward-growing counter.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Preserve the original's points-to identity.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* Value expressions live in a side table keyed by the decl;
	     re-register the copy's entry explicitly.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not tied to the original's function body or
	     symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the separately allocated option structure so the
	 copy does not alias the original's.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1410
1411 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1412 For example, this can copy a list made of TREE_LIST nodes. */
1413
1414 tree
1415 copy_list (tree list)
1416 {
1417 tree head;
1418 tree prev, next;
1419
1420 if (list == 0)
1421 return 0;
1422
1423 head = prev = copy_node (list);
1424 next = TREE_CHAIN (list);
1425 while (next)
1426 {
1427 TREE_CHAIN (prev) = copy_node (next);
1428 prev = TREE_CHAIN (prev);
1429 next = TREE_CHAIN (next);
1430 }
1431 return head;
1432 }
1433
1434 \f
1435 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1436 INTEGER_CST with value CST and type TYPE. */
1437
1438 static unsigned int
1439 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1440 {
1441 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1442 /* We need extra HWIs if CST is an unsigned integer with its
1443 upper bit set. */
1444 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1445 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1446 return cst.get_len ();
1447 }
1448
/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* ext_len > len happens for an unsigned value whose top bit is
	 set (see get_int_cst_ext_nunits): write the extra extension
	 HWIs explicitly, masking the most significant one to the bits
	 inside the precision.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Unsigned value whose precision ends mid-HWI: zero-extend the
	 partial most-significant element.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1480
1481 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1482
1483 static tree
1484 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1485 CXX_MEM_STAT_INFO)
1486 {
1487 size_t length = sizeof (struct tree_poly_int_cst);
1488 record_node_allocation_statistics (POLY_INT_CST, length);
1489
1490 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1491
1492 TREE_SET_CODE (t, POLY_INT_CST);
1493 TREE_CONSTANT (t) = 1;
1494 TREE_TYPE (t) = type;
1495 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1496 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1497 return t;
1498 }
1499
1500 /* Create a constant tree that contains CST sign-extended to TYPE. */
1501
1502 tree
1503 build_int_cst (tree type, poly_int64 cst)
1504 {
1505 /* Support legacy code. */
1506 if (!type)
1507 type = integer_type_node;
1508
1509 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1510 }
1511
1512 /* Create a constant tree that contains CST zero-extended to TYPE. */
1513
1514 tree
1515 build_int_cstu (tree type, poly_uint64 cst)
1516 {
1517 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1518 }
1519
1520 /* Create a constant tree that contains CST sign-extended to TYPE. */
1521
1522 tree
1523 build_int_cst_type (tree type, poly_int64 cst)
1524 {
1525 gcc_assert (type);
1526 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1527 }
1528
1529 /* Constructs tree in type TYPE from with value given by CST. Signedness
1530 of CST is assumed to be the same as the signedness of TYPE. */
1531
1532 tree
1533 double_int_to_tree (tree type, double_int cst)
1534 {
1535 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1536 }
1537
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision before
	     building the (unshared) constant.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* A genuine poly_int: flag every coefficient as
		 overflowed as well as the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1589
1590 /* These are the hash table functions for the hash table of INTEGER_CST
1591 nodes of a sizetype. */
1592
1593 /* Return the hash code X, an INTEGER_CST. */
1594
1595 hashval_t
1596 int_cst_hasher::hash (tree x)
1597 {
1598 const_tree const t = x;
1599 hashval_t code = TYPE_UID (TREE_TYPE (t));
1600 int i;
1601
1602 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1603 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1604
1605 return code;
1606 }
1607
1608 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1609 is the same as that given by *Y, which is the same. */
1610
1611 bool
1612 int_cst_hasher::equal (tree x, tree y)
1613 {
1614 const_tree const xt = x;
1615 const_tree const yt = y;
1616
1617 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1618 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1619 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1620 return false;
1621
1622 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1623 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1624 return false;
1625
1626 return true;
1627 }
1628
1629 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1630 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1631 number of slots that can be cached for the type. */
1632
1633 static inline tree
1634 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1635 int slot, int max_slots)
1636 {
1637 gcc_checking_assert (slot >= 0);
1638 /* Initialize cache. */
1639 if (!TYPE_CACHED_VALUES_P (type))
1640 {
1641 TYPE_CACHED_VALUES_P (type) = 1;
1642 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1643 }
1644 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1645 if (!t)
1646 {
1647 /* Create a new shared int. */
1648 t = build_new_int_cst (type, cst);
1649 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1650 }
1651 return t;
1652 }
1653
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;		/* Cache slot index, -1 if not cacheable.  */
  int limit = 0;	/* Size of the per-type cache vector.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Pointer types use three fixed cache slots; the indices must
	 match those used in cache_integer_cst.  */
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); slot 0 holds -1.  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal constant already existed; free the duplicate.  */
	ggc_free (nt);
    }

  return t;
}
1812
1813 hashval_t
1814 poly_int_cst_hasher::hash (tree t)
1815 {
1816 inchash::hash hstate;
1817
1818 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1819 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1820 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1821
1822 return hstate.end ();
1823 }
1824
1825 bool
1826 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1827 {
1828 if (TREE_TYPE (x) != y.first)
1829 return false;
1830 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1831 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1832 return false;
1833 return true;
1834 }
1835
1836 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1837 The elements must also have type TYPE. */
1838
1839 tree
1840 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1841 {
1842 unsigned int prec = TYPE_PRECISION (type);
1843 gcc_assert (prec <= values.coeffs[0].get_precision ());
1844 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1845
1846 inchash::hash h;
1847 h.add_int (TYPE_UID (type));
1848 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1849 h.add_wide_int (c.coeffs[i]);
1850 poly_int_cst_hasher::compare_type comp (type, &c);
1851 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1852 INSERT);
1853 if (*slot == NULL_TREE)
1854 {
1855 tree coeffs[NUM_POLY_INT_COEFFS];
1856 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1857 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1858 *slot = build_new_poly_int_cst (type, coeffs);
1859 }
1860 return *slot;
1861 }
1862
1863 /* Create a constant tree with value VALUE in type TYPE. */
1864
1865 tree
1866 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1867 {
1868 if (value.is_constant ())
1869 return wide_int_to_tree_1 (type, value.coeffs[0]);
1870 return build_poly_int_cst (type, value);
1871 }
1872
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;		/* Cache slot index, -1 if not cacheable.  */
  int limit = 0;	/* Size of the per-type cache vector.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* Slot 0 holds the null pointer, slot 2 the value one.  */
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokey, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N; slot 0 holds -1.  */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  /* A pre-existing entry is only legitimate when the caller
	     said duplicates were possible.  */
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
2002
2003
2004 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2005 and the rest are zeros. */
2006
2007 tree
2008 build_low_bits_mask (tree type, unsigned bits)
2009 {
2010 gcc_assert (bits <= TYPE_PRECISION (type));
2011
2012 return wide_int_to_tree (type, wi::mask (bits, false,
2013 TYPE_PRECISION (type)));
2014 }
2015
2016 /* Checks that X is integer constant that can be expressed in (unsigned)
2017 HOST_WIDE_INT without loss of precision. */
2018
2019 bool
2020 cst_and_fits_in_hwi (const_tree x)
2021 {
2022 return (TREE_CODE (x) == INTEGER_CST
2023 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2024 }
2025
2026 /* Build a newly constructed VECTOR_CST with the given values of
2027 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2028
2029 tree
2030 make_vector (unsigned log2_npatterns,
2031 unsigned int nelts_per_pattern MEM_STAT_DECL)
2032 {
2033 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2034 tree t;
2035 unsigned npatterns = 1 << log2_npatterns;
2036 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2037 unsigned length = (sizeof (struct tree_vector)
2038 + (encoded_nelts - 1) * sizeof (tree));
2039
2040 record_node_allocation_statistics (VECTOR_CST, length);
2041
2042 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2043
2044 TREE_SET_CODE (t, VECTOR_CST);
2045 TREE_CONSTANT (t) = 1;
2046 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2047 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2048
2049 return t;
2050 }
2051
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
  /* An empty constructor is a zero vector.  */
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* Flatten a nested vector constant into its elements.
	     If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Pad any elements the constructor did not supply with zeros.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
2084
/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A constant splat: encode as a VECTOR_CST with one single-element
	 pattern.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    /* Variable-length vector: represent the splat symbolically.  */
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Fixed-length vector of a non-constant element: build a
	 CONSTRUCTOR repeating SC for every element.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
2120
2121 /* If TYPE is not a vector type, just return SC, otherwise return
2122 build_vector_from_val (TYPE, SC). */
2123
2124 tree
2125 build_uniform_cst (tree type, tree sc)
2126 {
2127 if (!VECTOR_TYPE_P (type))
2128 return sc;
2129
2130 return build_vector_from_val (type, sc);
2131 }
2132
2133 /* Build a vector series of type TYPE in which element I has the value
2134 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2135 and a VEC_SERIES_EXPR otherwise. */
2136
2137 tree
2138 build_vec_series (tree type, tree base, tree step)
2139 {
2140 if (integer_zerop (step))
2141 return build_vector_from_val (type, base);
2142 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2143 {
2144 tree_vector_builder builder (type, 1, 3);
2145 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (base) + wi::to_wide (step));
2147 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2148 wi::to_wide (elt1) + wi::to_wide (step));
2149 builder.quick_push (base);
2150 builder.quick_push (elt1);
2151 builder.quick_push (elt2);
2152 return builder.build ();
2153 }
2154 return build2 (VEC_SERIES_EXPR, type, base, step);
2155 }
2156
2157 /* Return a vector with the same number of units and number of bits
2158 as VEC_TYPE, but in which the elements are a linear series of unsigned
2159 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2160
2161 tree
2162 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2163 {
2164 tree index_vec_type = vec_type;
2165 tree index_elt_type = TREE_TYPE (vec_type);
2166 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2167 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2168 {
2169 index_elt_type = build_nonstandard_integer_type
2170 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2171 index_vec_type = build_vector_type (index_elt_type, nunits);
2172 }
2173
2174 tree_vector_builder v (index_vec_type, 1, 3);
2175 for (unsigned int i = 0; i < 3; ++i)
2176 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2177 return v.build ();
2178 }
2179
2180 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2181 elements are A and the rest are B. */
2182
2183 tree
2184 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2185 {
2186 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2187 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2188 /* Optimize the constant case. */
2189 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2190 count /= 2;
2191 tree_vector_builder builder (vec_type, count, 2);
2192 for (unsigned int i = 0; i < count * 2; ++i)
2193 builder.quick_push (i < num_a ? a : b);
2194 return builder.build ();
2195 }
2196
2197 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2198 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2199
2200 void
2201 recompute_constructor_flags (tree c)
2202 {
2203 unsigned int i;
2204 tree val;
2205 bool constant_p = true;
2206 bool side_effects_p = false;
2207 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2208
2209 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2210 {
2211 /* Mostly ctors will have elts that don't have side-effects, so
2212 the usual case is to scan all the elements. Hence a single
2213 loop for both const and side effects, rather than one loop
2214 each (with early outs). */
2215 if (!TREE_CONSTANT (val))
2216 constant_p = false;
2217 if (TREE_SIDE_EFFECTS (val))
2218 side_effects_p = true;
2219 }
2220
2221 TREE_SIDE_EFFECTS (c) = side_effects_p;
2222 TREE_CONSTANT (c) = constant_p;
2223 }
2224
2225 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2226 CONSTRUCTOR C. */
2227
2228 void
2229 verify_constructor_flags (tree c)
2230 {
2231 unsigned int i;
2232 tree val;
2233 bool constant_p = TREE_CONSTANT (c);
2234 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2235 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2236
2237 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2238 {
2239 if (constant_p && !TREE_CONSTANT (val))
2240 internal_error ("non-constant element in constant CONSTRUCTOR");
2241 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2242 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2243 }
2244 }
2245
2246 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2247 are in the vec pointed to by VALS. */
2248 tree
2249 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2250 {
2251 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2252
2253 TREE_TYPE (c) = type;
2254 CONSTRUCTOR_ELTS (c) = vals;
2255
2256 recompute_constructor_flags (c);
2257
2258 return c;
2259 }
2260
2261 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2262 INDEX and VALUE. */
2263 tree
2264 build_constructor_single (tree type, tree index, tree value)
2265 {
2266 vec<constructor_elt, va_gc> *v;
2267 constructor_elt elt = {index, value};
2268
2269 vec_alloc (v, 1);
2270 v->quick_push (elt);
2271
2272 return build_constructor (type, v);
2273 }
2274
2275
2276 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2277 are in a list pointed to by VALS. */
2278 tree
2279 build_constructor_from_list (tree type, tree vals)
2280 {
2281 tree t;
2282 vec<constructor_elt, va_gc> *v = NULL;
2283
2284 if (vals)
2285 {
2286 vec_alloc (v, list_length (vals));
2287 for (t = vals; t; t = TREE_CHAIN (t))
2288 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2289 }
2290
2291 return build_constructor (type, v);
2292 }
2293
2294 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2295 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2296 fields in the constructor remain null. */
2297
2298 tree
2299 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2300 {
2301 vec<constructor_elt, va_gc> *v = NULL;
2302
2303 for (tree t : vals)
2304 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2305
2306 return build_constructor (type, v);
2307 }
2308
2309 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2310 of elements, provided as index/value pairs. */
2311
2312 tree
2313 build_constructor_va (tree type, int nelts, ...)
2314 {
2315 vec<constructor_elt, va_gc> *v = NULL;
2316 va_list p;
2317
2318 va_start (p, nelts);
2319 vec_alloc (v, nelts);
2320 while (nelts--)
2321 {
2322 tree index = va_arg (p, tree);
2323 tree value = va_arg (p, tree);
2324 CONSTRUCTOR_APPEND_ELT (v, index, value);
2325 }
2326 va_end (p);
2327 return build_constructor (type, v);
2328 }
2329
2330 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2331
2332 tree
2333 build_clobber (tree type, enum clobber_kind kind)
2334 {
2335 tree clobber = build_constructor (type, NULL);
2336 TREE_THIS_VOLATILE (clobber) = true;
2337 CLOBBER_KIND (clobber) = kind;
2338 return clobber;
2339 }
2340
2341 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2342
2343 tree
2344 build_fixed (tree type, FIXED_VALUE_TYPE f)
2345 {
2346 tree v;
2347 FIXED_VALUE_TYPE *fp;
2348
2349 v = make_node (FIXED_CST);
2350 fp = ggc_alloc<fixed_value> ();
2351 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2352
2353 TREE_TYPE (v) = type;
2354 TREE_FIXED_CST_PTR (v) = fp;
2355 return v;
2356 }
2357
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  /* Currently never set; see the ??? note about CHECK_FLOAT_TYPE below.  */
  int overflow = 0;

  /* dconst{0,1,2,m1,half} are used in various places in
     the middle-end and optimizers, allow them here
     even for decimal floating point types as an exception
     by converting them to decimal.  */
  if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
      && (d.cl == rvc_normal || d.cl == rvc_zero)
      && !d.decimal)
    {
      /* The bitwise memcmp against each well-known constant relies on
	 REAL_VALUE_TYPEs being byte-comparable; any other binary value
	 reaching here is a bug (gcc_unreachable below).  */
      if (memcmp (&d, &dconst1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "1");
      else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
	decimal_real_from_string (&d, "2");
      else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "-1");
      else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
	decimal_real_from_string (&d, "0.5");
      else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
	{
	  /* Make sure to give zero the minimum quantum exponent for
	     the type (which corresponds to all bits zero).  */
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  char buf[16];
	  sprintf (buf, "0e%d", fmt->emin - fmt->p);
	  decimal_real_from_string (&d, buf);
	}
      else
	gcc_unreachable ();
    }

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  TREE_TYPE (v) = type;
  /* Copy the (possibly rewritten) value into the node's payload.  */
  memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
  TREE_OVERFLOW (v) = overflow;
  return v;
}
2404
2405 /* Like build_real, but first truncate D to the type. */
2406
2407 tree
2408 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2409 {
2410 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2411 }
2412
2413 /* Return a new REAL_CST node whose type is TYPE
2414 and whose value is the integer value of the INTEGER_CST node I. */
2415
2416 REAL_VALUE_TYPE
2417 real_value_from_int_cst (const_tree type, const_tree i)
2418 {
2419 REAL_VALUE_TYPE d;
2420
2421 /* Clear all bits of the real value type so that we can later do
2422 bitwise comparisons to see if two values are the same. */
2423 memset (&d, 0, sizeof d);
2424
2425 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2426 TYPE_SIGN (TREE_TYPE (i)));
2427 return d;
2428 }
2429
2430 /* Given a tree representing an integer constant I, return a tree
2431 representing the same value as a floating-point constant of type TYPE. */
2432
2433 tree
2434 build_real_from_int_cst (tree type, const_tree i)
2435 {
2436 tree v;
2437 int overflow = TREE_OVERFLOW (i);
2438
2439 v = build_real (type, real_value_from_int_cst (type, i));
2440
2441 TREE_OVERFLOW (v) |= overflow;
2442 return v;
2443 }
2444
2445 /* Return a new REAL_CST node whose type is TYPE
2446 and whose value is the integer value I which has sign SGN. */
2447
2448 tree
2449 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2450 {
2451 REAL_VALUE_TYPE d;
2452
2453 /* Clear all bits of the real value type so that we can later do
2454 bitwise comparisons to see if two values are the same. */
2455 memset (&d, 0, sizeof d);
2456
2457 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2458 return build_real (type, d);
2459 }
2460
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  /* Raw GC allocation: the node is variable-sized, so make_node cannot
     be used here.  */
  tree s = (tree) ggc_internal_alloc (size);

  /* Clear only the tree_typed header; every payload field is written
     explicitly below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  /* Always add a terminating NUL after the LEN bytes (accounted for by
     the "+ 1" in SIZE above).  */
  s->string.str[len] = '\0';

  return s;
}
2488
2489 /* Return a newly constructed COMPLEX_CST node whose value is
2490 specified by the real and imaginary parts REAL and IMAG.
2491 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2492 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2493
2494 tree
2495 build_complex (tree type, tree real, tree imag)
2496 {
2497 gcc_assert (CONSTANT_CLASS_P (real));
2498 gcc_assert (CONSTANT_CLASS_P (imag));
2499
2500 tree t = make_node (COMPLEX_CST);
2501
2502 TREE_REALPART (t) = real;
2503 TREE_IMAGPART (t) = imag;
2504 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2505 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2506 return t;
2507 }
2508
2509 /* Build a complex (inf +- 0i), such as for the result of cproj.
2510 TYPE is the complex tree type of the result. If NEG is true, the
2511 imaginary zero is negative. */
2512
2513 tree
2514 build_complex_inf (tree type, bool neg)
2515 {
2516 REAL_VALUE_TYPE rzero = dconst0;
2517
2518 rzero.sign = neg;
2519 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2520 build_real (TREE_TYPE (type), rzero));
2521 }
2522
2523 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2524 element is set to 1. In particular, this is 1 + i for complex types. */
2525
2526 tree
2527 build_each_one_cst (tree type)
2528 {
2529 if (TREE_CODE (type) == COMPLEX_TYPE)
2530 {
2531 tree scalar = build_one_cst (TREE_TYPE (type));
2532 return build_complex (type, scalar, scalar);
2533 }
2534 else
2535 return build_one_cst (type);
2536 }
2537
2538 /* Return a constant of arithmetic type TYPE which is the
2539 multiplicative identity of the set TYPE. */
2540
2541 tree
2542 build_one_cst (tree type)
2543 {
2544 switch (TREE_CODE (type))
2545 {
2546 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2547 case POINTER_TYPE: case REFERENCE_TYPE:
2548 case OFFSET_TYPE:
2549 return build_int_cst (type, 1);
2550
2551 case REAL_TYPE:
2552 return build_real (type, dconst1);
2553
2554 case FIXED_POINT_TYPE:
2555 /* We can only generate 1 for accum types. */
2556 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2557 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2558
2559 case VECTOR_TYPE:
2560 {
2561 tree scalar = build_one_cst (TREE_TYPE (type));
2562
2563 return build_vector_from_val (type, scalar);
2564 }
2565
2566 case COMPLEX_TYPE:
2567 return build_complex (type,
2568 build_one_cst (TREE_TYPE (type)),
2569 build_zero_cst (TREE_TYPE (type)));
2570
2571 default:
2572 gcc_unreachable ();
2573 }
2574 }
2575
2576 /* Return an integer of type TYPE containing all 1's in as much precision as
2577 it contains, or a complex or vector whose subparts are such integers. */
2578
2579 tree
2580 build_all_ones_cst (tree type)
2581 {
2582 if (TREE_CODE (type) == COMPLEX_TYPE)
2583 {
2584 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2585 return build_complex (type, scalar, scalar);
2586 }
2587 else
2588 return build_minus_one_cst (type);
2589 }
2590
2591 /* Return a constant of arithmetic type TYPE which is the
2592 opposite of the multiplicative identity of the set TYPE. */
2593
2594 tree
2595 build_minus_one_cst (tree type)
2596 {
2597 switch (TREE_CODE (type))
2598 {
2599 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2600 case POINTER_TYPE: case REFERENCE_TYPE:
2601 case OFFSET_TYPE:
2602 return build_int_cst (type, -1);
2603
2604 case REAL_TYPE:
2605 return build_real (type, dconstm1);
2606
2607 case FIXED_POINT_TYPE:
2608 /* We can only generate 1 for accum types. */
2609 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2610 return build_fixed (type,
2611 fixed_from_double_int (double_int_minus_one,
2612 SCALAR_TYPE_MODE (type)));
2613
2614 case VECTOR_TYPE:
2615 {
2616 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2617
2618 return build_vector_from_val (type, scalar);
2619 }
2620
2621 case COMPLEX_TYPE:
2622 return build_complex (type,
2623 build_minus_one_cst (TREE_TYPE (type)),
2624 build_zero_cst (TREE_TYPE (type)));
2625
2626 default:
2627 gcc_unreachable ();
2628 }
2629 }
2630
2631 /* Build 0 constant of type TYPE. This is used by constructor folding
2632 and thus the constant should be represented in memory by
2633 zero(es). */
2634
2635 tree
2636 build_zero_cst (tree type)
2637 {
2638 switch (TREE_CODE (type))
2639 {
2640 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2641 case POINTER_TYPE: case REFERENCE_TYPE:
2642 case OFFSET_TYPE: case NULLPTR_TYPE:
2643 return build_int_cst (type, 0);
2644
2645 case REAL_TYPE:
2646 return build_real (type, dconst0);
2647
2648 case FIXED_POINT_TYPE:
2649 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2650
2651 case VECTOR_TYPE:
2652 {
2653 tree scalar = build_zero_cst (TREE_TYPE (type));
2654
2655 return build_vector_from_val (type, scalar);
2656 }
2657
2658 case COMPLEX_TYPE:
2659 {
2660 tree zero = build_zero_cst (TREE_TYPE (type));
2661
2662 return build_complex (type, zero, zero);
2663 }
2664
2665 default:
2666 if (!AGGREGATE_TYPE_P (type))
2667 return fold_convert (type, integer_zero_node);
2668 return build_constructor (type, NULL);
2669 }
2670 }
2671
2672
/* Build a TREE_BINFO node whose embedded vector has room for
   BASE_BINFOS base binfos.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vector is embedded at the tail of the node, so the
     allocation covers the header plus the vector's embedded size.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the fixed part of the node; the embedded vector is
     initialized explicitly below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2694
2695 /* Create a CASE_LABEL_EXPR tree node and return it. */
2696
2697 tree
2698 build_case_label (tree low_value, tree high_value, tree label_decl)
2699 {
2700 tree t = make_node (CASE_LABEL_EXPR);
2701
2702 TREE_TYPE (t) = void_type_node;
2703 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2704
2705 CASE_LOW (t) = low_value;
2706 CASE_HIGH (t) = high_value;
2707 CASE_LABEL (t) = label_decl;
2708 CASE_CHAIN (t) = NULL_TREE;
2709
2710 return t;
2711 }
2712
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_int_cst already contains one HOST_WIDE_INT element,
     hence the "ext_len - 1".  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  /* An INTEGER_CST always has at least one unit.  */
  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2744
2745 /* Build a newly constructed TREE_VEC node of length LEN. */
2746
2747 tree
2748 make_tree_vec (int len MEM_STAT_DECL)
2749 {
2750 tree t;
2751 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2752
2753 record_node_allocation_statistics (TREE_VEC, length);
2754
2755 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2756
2757 TREE_SET_CODE (t, TREE_VEC);
2758 TREE_VEC_LENGTH (t) = len;
2759
2760 return t;
2761 }
2762
2763 /* Grow a TREE_VEC node to new length LEN. */
2764
2765 tree
2766 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2767 {
2768 gcc_assert (TREE_CODE (v) == TREE_VEC);
2769
2770 int oldlen = TREE_VEC_LENGTH (v);
2771 gcc_assert (len > oldlen);
2772
2773 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2774 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2775
2776 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2777
2778 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2779
2780 TREE_VEC_LENGTH (v) = len;
2781
2782 return v;
2783 }
2784 \f
2785 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2786 fixed, and scalar, complex or vector. */
2787
2788 bool
2789 zerop (const_tree expr)
2790 {
2791 return (integer_zerop (expr)
2792 || real_zerop (expr)
2793 || fixed_zerop (expr));
2794 }
2795
2796 /* Return 1 if EXPR is the integer constant zero or a complex constant
2797 of zero, or a location wrapper for such a constant. */
2798
2799 bool
2800 integer_zerop (const_tree expr)
2801 {
2802 STRIP_ANY_LOCATION_WRAPPER (expr);
2803
2804 switch (TREE_CODE (expr))
2805 {
2806 case INTEGER_CST:
2807 return wi::to_wide (expr) == 0;
2808 case COMPLEX_CST:
2809 return (integer_zerop (TREE_REALPART (expr))
2810 && integer_zerop (TREE_IMAGPART (expr)));
2811 case VECTOR_CST:
2812 return (VECTOR_CST_NPATTERNS (expr) == 1
2813 && VECTOR_CST_DUPLICATE_P (expr)
2814 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2815 default:
2816 return false;
2817 }
2818 }
2819
2820 /* Return 1 if EXPR is the integer constant one or the corresponding
2821 complex constant, or a location wrapper for such a constant. */
2822
2823 bool
2824 integer_onep (const_tree expr)
2825 {
2826 STRIP_ANY_LOCATION_WRAPPER (expr);
2827
2828 switch (TREE_CODE (expr))
2829 {
2830 case INTEGER_CST:
2831 return wi::eq_p (wi::to_widest (expr), 1);
2832 case COMPLEX_CST:
2833 return (integer_onep (TREE_REALPART (expr))
2834 && integer_zerop (TREE_IMAGPART (expr)));
2835 case VECTOR_CST:
2836 return (VECTOR_CST_NPATTERNS (expr) == 1
2837 && VECTOR_CST_DUPLICATE_P (expr)
2838 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2839 default:
2840 return false;
2841 }
2842 }
2843
2844 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2845 return 1 if every piece is the integer constant one.
2846 Also return 1 for location wrappers for such a constant. */
2847
2848 bool
2849 integer_each_onep (const_tree expr)
2850 {
2851 STRIP_ANY_LOCATION_WRAPPER (expr);
2852
2853 if (TREE_CODE (expr) == COMPLEX_CST)
2854 return (integer_onep (TREE_REALPART (expr))
2855 && integer_onep (TREE_IMAGPART (expr)));
2856 else
2857 return integer_onep (expr);
2858 }
2859
2860 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2861 it contains, or a complex or vector whose subparts are such integers,
2862 or a location wrapper for such a constant. */
2863
2864 bool
2865 integer_all_onesp (const_tree expr)
2866 {
2867 STRIP_ANY_LOCATION_WRAPPER (expr);
2868
2869 if (TREE_CODE (expr) == COMPLEX_CST
2870 && integer_all_onesp (TREE_REALPART (expr))
2871 && integer_all_onesp (TREE_IMAGPART (expr)))
2872 return true;
2873
2874 else if (TREE_CODE (expr) == VECTOR_CST)
2875 return (VECTOR_CST_NPATTERNS (expr) == 1
2876 && VECTOR_CST_DUPLICATE_P (expr)
2877 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2878
2879 else if (TREE_CODE (expr) != INTEGER_CST)
2880 return false;
2881
2882 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2883 == wi::to_wide (expr));
2884 }
2885
2886 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2887 for such a constant. */
2888
2889 bool
2890 integer_minus_onep (const_tree expr)
2891 {
2892 STRIP_ANY_LOCATION_WRAPPER (expr);
2893
2894 if (TREE_CODE (expr) == COMPLEX_CST)
2895 return (integer_all_onesp (TREE_REALPART (expr))
2896 && integer_zerop (TREE_IMAGPART (expr)));
2897 else
2898 return integer_all_onesp (expr);
2899 }
2900
2901 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2902 one bit on), or a location wrapper for such a constant. */
2903
2904 bool
2905 integer_pow2p (const_tree expr)
2906 {
2907 STRIP_ANY_LOCATION_WRAPPER (expr);
2908
2909 if (TREE_CODE (expr) == COMPLEX_CST
2910 && integer_pow2p (TREE_REALPART (expr))
2911 && integer_zerop (TREE_IMAGPART (expr)))
2912 return true;
2913
2914 if (TREE_CODE (expr) != INTEGER_CST)
2915 return false;
2916
2917 return wi::popcount (wi::to_wide (expr)) == 1;
2918 }
2919
2920 /* Return 1 if EXPR is an integer constant other than zero or a
2921 complex constant other than zero, or a location wrapper for such a
2922 constant. */
2923
2924 bool
2925 integer_nonzerop (const_tree expr)
2926 {
2927 STRIP_ANY_LOCATION_WRAPPER (expr);
2928
2929 return ((TREE_CODE (expr) == INTEGER_CST
2930 && wi::to_wide (expr) != 0)
2931 || (TREE_CODE (expr) == COMPLEX_CST
2932 && (integer_nonzerop (TREE_REALPART (expr))
2933 || integer_nonzerop (TREE_IMAGPART (expr)))));
2934 }
2935
2936 /* Return 1 if EXPR is the integer constant one. For vector,
2937 return 1 if every piece is the integer constant minus one
2938 (representing the value TRUE).
2939 Also return 1 for location wrappers for such a constant. */
2940
2941 bool
2942 integer_truep (const_tree expr)
2943 {
2944 STRIP_ANY_LOCATION_WRAPPER (expr);
2945
2946 if (TREE_CODE (expr) == VECTOR_CST)
2947 return integer_all_onesp (expr);
2948 return integer_onep (expr);
2949 }
2950
2951 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2952 for such a constant. */
2953
2954 bool
2955 fixed_zerop (const_tree expr)
2956 {
2957 STRIP_ANY_LOCATION_WRAPPER (expr);
2958
2959 return (TREE_CODE (expr) == FIXED_CST
2960 && TREE_FIXED_CST (expr).data.is_zero ());
2961 }
2962
2963 /* Return the power of two represented by a tree node known to be a
2964 power of two. */
2965
2966 int
2967 tree_log2 (const_tree expr)
2968 {
2969 if (TREE_CODE (expr) == COMPLEX_CST)
2970 return tree_log2 (TREE_REALPART (expr));
2971
2972 return wi::exact_log2 (wi::to_wide (expr));
2973 }
2974
2975 /* Similar, but return the largest integer Y such that 2 ** Y is less
2976 than or equal to EXPR. */
2977
2978 int
2979 tree_floor_log2 (const_tree expr)
2980 {
2981 if (TREE_CODE (expr) == COMPLEX_CST)
2982 return tree_log2 (TREE_REALPART (expr));
2983
2984 return wi::floor_log2 (wi::to_wide (expr));
2985 }
2986
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have a meaningful trailing-zero
     count here.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Derive the count from the SSA name's recorded nonzero bits.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve at least the smaller of the operands'
	 trailing-zero counts; early out if the first is already 0.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so the larger of the two counts holds.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Multiplication sums the operands' trailing-zero counts.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      /* A left shift by a known in-range constant adds that many
	 trailing zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A right shift by a known in-range constant removes that many
	 trailing zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two behaves like a right shift
	 for this purpose.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* An operand known to be zero (all bits are trailing zeros in its
	 own precision) stays zero, so it has PREC trailing zeros in the
	 converted type.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* The value may come from either arm, so take the minimum;
	 early out if the first arm already contributes nothing.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is its second operand.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive the count from the known alignment of the address.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
3097
3098 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3099 decimal float constants, so don't return 1 for them.
3100 Also return 1 for location wrappers around such a constant. */
3101
3102 bool
3103 real_zerop (const_tree expr)
3104 {
3105 STRIP_ANY_LOCATION_WRAPPER (expr);
3106
3107 switch (TREE_CODE (expr))
3108 {
3109 case REAL_CST:
3110 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3111 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3112 case COMPLEX_CST:
3113 return real_zerop (TREE_REALPART (expr))
3114 && real_zerop (TREE_IMAGPART (expr));
3115 case VECTOR_CST:
3116 {
3117 /* Don't simply check for a duplicate because the predicate
3118 accepts both +0.0 and -0.0. */
3119 unsigned count = vector_cst_encoded_nelts (expr);
3120 for (unsigned int i = 0; i < count; ++i)
3121 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3122 return false;
3123 return true;
3124 }
3125 default:
3126 return false;
3127 }
3128 }
3129
3130 /* Return 1 if EXPR is the real constant one in real or complex form.
3131 Trailing zeroes matter for decimal float constants, so don't return
3132 1 for them.
3133 Also return 1 for location wrappers around such a constant. */
3134
3135 bool
3136 real_onep (const_tree expr)
3137 {
3138 STRIP_ANY_LOCATION_WRAPPER (expr);
3139
3140 switch (TREE_CODE (expr))
3141 {
3142 case REAL_CST:
3143 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3144 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3145 case COMPLEX_CST:
3146 return real_onep (TREE_REALPART (expr))
3147 && real_zerop (TREE_IMAGPART (expr));
3148 case VECTOR_CST:
3149 return (VECTOR_CST_NPATTERNS (expr) == 1
3150 && VECTOR_CST_DUPLICATE_P (expr)
3151 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3152 default:
3153 return false;
3154 }
3155 }
3156
3157 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3158 matter for decimal float constants, so don't return 1 for them.
3159 Also return 1 for location wrappers around such a constant. */
3160
3161 bool
3162 real_minus_onep (const_tree expr)
3163 {
3164 STRIP_ANY_LOCATION_WRAPPER (expr);
3165
3166 switch (TREE_CODE (expr))
3167 {
3168 case REAL_CST:
3169 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3170 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3171 case COMPLEX_CST:
3172 return real_minus_onep (TREE_REALPART (expr))
3173 && real_zerop (TREE_IMAGPART (expr));
3174 case VECTOR_CST:
3175 return (VECTOR_CST_NPATTERNS (expr) == 1
3176 && VECTOR_CST_DUPLICATE_P (expr)
3177 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3178 default:
3179 return false;
3180 }
3181 }
3182
3183 /* Return true if T could be a floating point zero. */
3184
bool
real_maybe_zerop (const_tree expr)
{
  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* Can't use real_zerop here, as it always returns false for decimal
	 floats.  And can't use TREE_REAL_CST (expr).cl == rvc_zero
	 either, as decimal zeros are rvc_normal.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst0);
    case COMPLEX_CST:
      /* Maybe zero if either the real or the imaginary part may be.  */
      return (real_maybe_zerop (TREE_REALPART (expr))
	      || real_maybe_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      {
	/* Scan each encoded element of the vector constant.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return true;
	return false;
      }
    default:
      /* Non-constant: conservatively assume it could be zero.
	 Perhaps for SSA_NAMEs we could query frange.  */
      return true;
    }
}
3211
3212 /* Nonzero if EXP is a constant or a cast of a constant. */
3213
3214 bool
3215 really_constant_p (const_tree exp)
3216 {
3217 /* This is not quite the same as STRIP_NOPS. It does more. */
3218 while (CONVERT_EXPR_P (exp)
3219 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3220 exp = TREE_OPERAND (exp, 0);
3221 return TREE_CONSTANT (exp);
3222 }
3223
3224 /* Return true if T holds a polynomial pointer difference, storing it in
3225 *VALUE if so. A true return means that T's precision is no greater
3226 than 64 bits, which is the largest address space we support, so *VALUE
3227 never loses precision. However, the signedness of the result does
3228 not necessarily match the signedness of T: sometimes an unsigned type
3229 like sizetype is used to encode a value that is actually negative. */
3230
bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* Reject constants that do not fit in a host wide integer.  */
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Validate every coefficient before writing any of them, so
	 that *VALUE is never left partially updated on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
3254
/* Convert T, which must satisfy tree_fits_poly_int64_p, to a
   poly_int64.  */

poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  /* Plain INTEGER_CST: the low word is sufficient given the assert.  */
  return TREE_INT_CST_LOW (t);
}
3263
/* Convert T, which must satisfy tree_fits_poly_uint64_p, to a
   poly_uint64.  */

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  /* Plain INTEGER_CST: the low word is sufficient given the assert.  */
  return TREE_INT_CST_LOW (t);
}
3272 \f
3273 /* Return first list element whose TREE_VALUE is ELEM.
3274 Return 0 if ELEM is not in LIST. */
3275
3276 tree
3277 value_member (tree elem, tree list)
3278 {
3279 while (list)
3280 {
3281 if (elem == TREE_VALUE (list))
3282 return list;
3283 list = TREE_CHAIN (list);
3284 }
3285 return NULL_TREE;
3286 }
3287
3288 /* Return first list element whose TREE_PURPOSE is ELEM.
3289 Return 0 if ELEM is not in LIST. */
3290
3291 tree
3292 purpose_member (const_tree elem, tree list)
3293 {
3294 while (list)
3295 {
3296 if (elem == TREE_PURPOSE (list))
3297 return list;
3298 list = TREE_CHAIN (list);
3299 }
3300 return NULL_TREE;
3301 }
3302
3303 /* Return true if ELEM is in V. */
3304
bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  unsigned ix;
  tree t;
  /* Linear scan; the _SAFE iterator variant tolerates V being null.  */
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}
3315
3316 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3317 NULL_TREE. */
3318
3319 tree
3320 chain_index (int idx, tree chain)
3321 {
3322 for (; chain && idx > 0; --idx)
3323 chain = TREE_CHAIN (chain);
3324 return chain;
3325 }
3326
3327 /* Return nonzero if ELEM is part of the chain CHAIN. */
3328
3329 bool
3330 chain_member (const_tree elem, const_tree chain)
3331 {
3332 while (chain)
3333 {
3334 if (elem == chain)
3335 return true;
3336 chain = DECL_CHAIN (chain);
3337 }
3338
3339 return false;
3340 }
3341
3342 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3343 We expect a null pointer to mark the end of the chain.
3344 This is the Lisp primitive `length'. */
3345
int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q is a second cursor used for cycle detection.  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      /* Q advances one link for every two advances of P.  If the chain
	 is circular, P eventually catches up with Q and the assert
	 below fires instead of looping forever.  */
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3368
3369 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3370 UNION_TYPE TYPE, or NULL_TREE if none. */
3371
3372 tree
3373 first_field (const_tree type)
3374 {
3375 tree t = TYPE_FIELDS (type);
3376 while (t && TREE_CODE (t) != FIELD_DECL)
3377 t = TREE_CHAIN (t);
3378 return t;
3379 }
3380
3381 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3382 UNION_TYPE TYPE, or NULL_TREE if none. */
3383
3384 tree
3385 last_field (const_tree type)
3386 {
3387 tree last = NULL_TREE;
3388
3389 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3390 {
3391 if (TREE_CODE (fld) != FIELD_DECL)
3392 continue;
3393
3394 last = fld;
3395 }
3396
3397 return last;
3398 }
3399
3400 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3401 by modifying the last node in chain 1 to point to chain 2.
3402 This is the Lisp primitive `nconc'. */
3403
tree
chainon (tree op1, tree op2)
{
  tree t1;

  /* Concatenation with an empty chain is the other chain.  */
  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Find the last node of OP1 and splice OP2 onto it in place.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the splice did not create a cycle, i.e. OP2 did not
       already contain the former tail of OP1.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3428
3429 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3430
3431 tree
3432 tree_last (tree chain)
3433 {
3434 tree next;
3435 if (chain)
3436 while ((next = TREE_CHAIN (chain)))
3437 chain = next;
3438 return chain;
3439 }
3440
3441 /* Reverse the order of elements in the chain T,
3442 and return the new head of the chain (old last element). */
3443
tree
nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      /* We shouldn't be using this function to reverse BLOCK chains; we
	 have blocks_nreverse for that.  */
      gcc_checking_assert (TREE_CODE (decl) != BLOCK);
      /* Classic in-place list reversal: save the successor, then point
	 this node back at the already-reversed prefix.  */
      next = TREE_CHAIN (decl);
      TREE_CHAIN (decl) = prev;
      prev = decl;
    }
  /* PREV is now the old last element, i.e. the new head.  */
  return prev;
}
3459 \f
3460 /* Return a newly created TREE_LIST node whose
3461 purpose and value fields are PARM and VALUE. */
3462
tree
build_tree_list (tree parm, tree value MEM_STAT_DECL)
{
  /* MEM_STAT_DECL / PASS_MEM_STAT forward allocation-statistics
     bookkeeping info to make_node when statistics are enabled.  */
  tree t = make_node (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}
3471
3472 /* Build a chain of TREE_LIST nodes from a vector. */
3473
tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  /* PP is a tail pointer: the slot where the next node is stored.
     This keeps appends O(1) and preserves the vector's order.  */
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      /* Each node gets a null TREE_PURPOSE and T as TREE_VALUE.  */
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
3488
3489 /* Return a newly created TREE_LIST node whose
3490 purpose and value fields are PURPOSE and VALUE
3491 and whose TREE_CHAIN is CHAIN. */
3492
tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  /* Allocate the node directly from the GC allocator rather than via
     make_node; only the tree_common prefix needs to be cleared since
     every other field is assigned below.  */
  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
3509
3510 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3511 trees. */
3512
vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  /* Reserve exactly as many slots as the constructor has elements so
     that the quick_push calls below cannot overflow the vector.  */
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  /* Copy only the values; indices/purposes are dropped.  */
  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
3526 \f
3527 /* Return the size nominally occupied by an object of type TYPE
3528 when it resides in memory. The value is measured in units of bytes,
3529 and its data type is that normally used for type sizes
3530 (which is the first type created by make_signed_type or
3531 make_unsigned_type). */
3532
tree
size_in_bytes_loc (location_t loc, const_tree type)
{
  tree t;

  /* Error recovery: pretend erroneous types have size zero.  */
  if (type == error_mark_node)
    return integer_zero_node;

  /* All variants of a type share the same size; use the main variant.  */
  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      /* Incomplete type: let the frontend report the error at LOC and
	 return size zero so callers can continue.  */
      lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}
3552
3553 /* Return the size of TYPE (in bytes) as a wide integer
3554 or return -1 if the size can vary or is larger than an integer. */
3555
3556 HOST_WIDE_INT
3557 int_size_in_bytes (const_tree type)
3558 {
3559 tree t;
3560
3561 if (type == error_mark_node)
3562 return 0;
3563
3564 type = TYPE_MAIN_VARIANT (type);
3565 t = TYPE_SIZE_UNIT (type);
3566
3567 if (t && tree_fits_uhwi_p (t))
3568 return TREE_INT_CST_LOW (t);
3569 else
3570 return -1;
3571 }
3572
3573 /* Return the maximum size of TYPE (in bytes) as a wide integer
3574 or return -1 if the size can vary or is larger than an integer. */
3575
HOST_WIDE_INT
max_int_size_in_bytes (const_tree type)
{
  HOST_WIDE_INT size = -1;
  tree size_tree;

  /* If this is an array type, check for a possible MAX_SIZE attached.  */

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      size_tree = TYPE_ARRAY_MAX_SIZE (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* If we still haven't been able to get a size, see if the language
     can compute a maximum size.  */

  if (size == -1)
    {
      size_tree = lang_hooks.types.max_size (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* -1 means the size can vary or is larger than an integer.  */
  return size;
}
3605 \f
3606 /* Return the bit position of FIELD, in bits from the start of the record.
3607 This is a tree of type bitsizetype. */
3608
3609 tree
3610 bit_position (const_tree field)
3611 {
3612 return bit_from_pos (DECL_FIELD_OFFSET (field),
3613 DECL_FIELD_BIT_OFFSET (field));
3614 }
3615 \f
3616 /* Return the byte position of FIELD, in bytes from the start of the record.
3617 This is a tree of type sizetype. */
3618
3619 tree
3620 byte_position (const_tree field)
3621 {
3622 return byte_from_pos (DECL_FIELD_OFFSET (field),
3623 DECL_FIELD_BIT_OFFSET (field));
3624 }
3625
/* Likewise, but return the byte position as an integer.  It must be
   representable in that way (since it could be a signed value, we
   don't have the option of returning -1 like int_size_in_bytes can).  */
3629
3630 HOST_WIDE_INT
3631 int_byte_position (const_tree field)
3632 {
3633 return tree_to_shwi (byte_position (field));
3634 }
3635 \f
3636 /* Return, as a tree node, the number of elements for TYPE (which is an
3637 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3638
tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    {
      /* zero sized arrays are represented from C FE as complete types with
	 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
	 them as min 0, max -1.  Normalize the C form to the C++ one.  */
      if (COMPLETE_TYPE_P (type)
	  && integer_zerop (TYPE_SIZE (type))
	  && integer_zerop (min))
	return build_int_cst (TREE_TYPE (min), -1);

      return error_mark_node;
    }

  /* The count minus one is MAX - MIN; skip the subtraction when the
     lower bound is zero.  */
  return (integer_zerop (min)
	  ? max
	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
3671 \f
3672 /* If arg is static -- a reference to an object in static storage -- then
3673 return the object. This is not the same as the C meaning of `static'.
3674 If arg isn't static, return NULL. */
3675
tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-local and dllimport'd variables are excluded: their
	 addresses are not simple compile-time constants.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise the reference is static iff its base is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* A constant-size element at a constant index is static iff the
	 underlying array is.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3737
3738 \f
3739
3740
3741 /* Return whether OP is a DECL whose address is function-invariant. */
3742
bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      return true;

    case VAR_DECL:
      /* Static storage, thread-local storage, and variables belonging
	 to the current function all keep a fixed address during one
	 execution of that function.  */
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || DECL_THREAD_LOCAL_P (op)
	  || DECL_CONTEXT (op) == current_function_decl
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    default:
      break;
    }

  return false;
}
3777
3778 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3779
bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      /* Unlike decl_address_invariant_p, dllimport'd variables are
	 excluded here, but thread-local ones still qualify.  */
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
	   && !DECL_DLLIMPORT_P (op))
	  || DECL_THREAD_LOCAL_P (op))
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
	return true;
      break;

    default:
      break;
    }

  return false;
}
3811
3812
3813 /* Return true if T is function-invariant (internal function, does
3814 not handle arithmetic; that's handled in skip_simple_arithmetic and
3815 tree_invariant_p). */
3816
static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and side-effect-free read-only nodes are invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      /* A SAVE_EXPR is evaluated once; reusing it is invariant.  */
      return true;

    case ADDR_EXPR:
      /* &X is invariant when every component of the access path is:
	 walk down through handled components checking indices and
	 variable offsets/sizes.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* Operands 2 and 3 (non-default lower bound / element
		 size) must be absent, and the index itself invariant.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2 is a non-default field offset; reject it.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* Finally the base itself must have an invariant address.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3863
3864 /* Return true if T is function-invariant. */
3865
3866 bool
3867 tree_invariant_p (tree t)
3868 {
3869 tree inner = skip_simple_arithmetic (t);
3870 return tree_invariant_p_1 (inner);
3871 }
3872
3873 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3874 Do this to any expression which may be used in more than one place,
3875 but must be evaluated only once.
3876
3877 Normally, expand_expr would reevaluate the expression each time.
3878 Calling save_expr produces something that is evaluated and recorded
3879 the first time expand_expr is called on it. Subsequent calls to
3880 expand_expr just reuse the recorded value.
3881
3882 The call to expand_expr that generates code that actually computes
3883 the value is the first call *at compile time*. Subsequent calls
3884 *at compile time* generate code to use the saved value.
3885 This produces correct result provided that *at run time* control
3886 always flows through the insns made by the first expand_expr
3887 before reaching the other places where the save_expr was evaluated.
3888 You, the caller of save_expr, must make sure this is so.
3889
3890 Constants, and certain read-only nodes, are returned with no
3891 SAVE_EXPR because that is safe. Expressions containing placeholders
3892 are not touched; see tree.def for an explanation of what these
3893 are used for. */
3894
tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  /* Invariant expressions need no SAVE_EXPR wrapper at all.  */
  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3932
3933 /* Look inside EXPR into any simple arithmetic operations. Return the
3934 outermost non-arithmetic or non-invariant node. */
3935
tree
skip_simple_arithmetic (tree expr)
{
  /* We don't care about whether this can be used as an lvalue in this
     context.  */
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
     a constant, it will be more efficient to not make another SAVE_EXPR since
     it will allow better simplification and GCSE will be able to merge the
     computations if they actually occur.  */
  while (true)
    {
      if (UNARY_CLASS_P (expr))
	expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
	{
	  /* Descend into whichever operand is the non-invariant one;
	     stop if neither operand is invariant.  */
	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
	    expr = TREE_OPERAND (expr, 0);
	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
	    expr = TREE_OPERAND (expr, 1);
	  else
	    break;
	}
      else
	break;
    }

  return expr;
}
3967
3968 /* Look inside EXPR into simple arithmetic operations involving constants.
3969 Return the outermost non-arithmetic or non-constant node. */
3970
3971 tree
3972 skip_simple_constant_arithmetic (tree expr)
3973 {
3974 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3975 expr = TREE_OPERAND (expr, 0);
3976
3977 while (true)
3978 {
3979 if (UNARY_CLASS_P (expr))
3980 expr = TREE_OPERAND (expr, 0);
3981 else if (BINARY_CLASS_P (expr))
3982 {
3983 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3984 expr = TREE_OPERAND (expr, 0);
3985 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3986 expr = TREE_OPERAND (expr, 1);
3987 else
3988 break;
3989 }
3990 else
3991 break;
3992 }
3993
3994 return expr;
3995 }
3996
3997 /* Return which tree structure is used by T. */
3998
3999 enum tree_node_structure_enum
4000 tree_node_structure (const_tree t)
4001 {
4002 const enum tree_code code = TREE_CODE (t);
4003 return tree_node_structure_for_code (code);
4004 }
4005
4006 /* Set various status flags when building a CALL_EXPR object T. */
4007
static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Note: I is reused below as the operand index.  The scan starts at
     operand 1; NOTE(review): operand 0 of a vl_exp appears to hold the
     operand count rather than an argument — confirm against tree.h.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
4035 \f
4036 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4037 size or offset that depends on a field within a record. */
4038
bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will handle
	 them; only the base object (operand 0) matters here.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* Generic fallback: check up to two operands by arity.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* Check every call argument.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
4122
4123 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4124 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4125 field positions. */
4126
static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* Fully covered by the size/component checks above.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (QUAL_UNION only) and
	   type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
4191
4192 /* Wrapper around above function used to cache its result. */
4193
bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  The cached value is biased by one:
     0 = not yet computed, 1 = false, 2 = true.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
4216 \f
4217 /* Push tree EXP onto vector QUEUE if it is not already present. */
4218
static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Structural (not pointer) comparison against existing entries.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  /* ITER is null here iff the loop ran to completion without finding
     a match, i.e. EXP is not yet in the queue.  */
  if (!iter)
    queue->safe_push (exp);
}
4232
4233 /* Given a tree EXP, find all occurrences of references to fields
4234 in a PLACEHOLDER_EXPR and place them in vector REFS without
4235 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4236 we assume here that EXP contains only arithmetic expressions
4237 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4238 argument list. */
4239
void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk to the base of the reference chain.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* If the base is a PLACEHOLDER_EXPR, record the whole reference;
	 otherwise recurse into the object being referenced.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Recurse into each operand of a fixed-arity node.  */
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Variable-arity node: scan operands starting at 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4306
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.

   EXP itself is returned unchanged when no substitution took place,
   so callers can use pointer identity to detect that nothing was
   replaced.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  Walk down through
	 any chain of reference nodes to find the underlying object.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute into each operand; rebuild (and fold) the node only
	   if at least one operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* The call arguments proper start at index 3: operand 0
		   is the operand count, 1 the callee, 2 the static
		   chain.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Substitute into each operand except operand 0, which holds
	     the operand count of the variadic node.  Copy EXP lazily,
	     only once a substitution actually changes something.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Carry over flags the rebuild does not recompute for us.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4502
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.

   If no replacement is found, the original PLACEHOLDER_EXPR (or EXP when
   nothing changed) is returned.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: walk down the chain of OBJ (through operand 1 of
	 COMPOUND_EXPR/COND_EXPR, else operand 0 of any reference, unary,
	 binary, variadic or expression node) looking for an element whose
	 main-variant type matches the placeholder's directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass over the same chain: accept a pointer to the needed
	 type and build the dereference.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Recurse into each operand; rebuild (and fold) the node only
	   when an operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Substitute into each operand except operand 0, which holds
	     the operand count of the variadic node.  Copy EXP lazily,
	     only once a substitution actually changes something.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Carry over flags the rebuild does not recompute for us.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4673 \f
4674
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt leaves type and flags unset; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4760
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference being converted.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The pointer operand must be evaluated only once.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* The base is stabilized as a reference, the index as a subtree.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt leaves type, flags and location unset; copy them over.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
  protected_set_expr_location (result, EXPR_LOCATION (ref));

  return result;
}
4844 \f
4845 /* Low-level constructors for expressions. */
4846
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR T, derived from the handled
   components and the base object of its operand.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Fold the constancy and side-effect flags of NODE into TC and SE.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Let the language map the remaining expression to a decl, possibly
     refining TC and SE as it does so.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4923
4924 /* Build an expression of code CODE, data type TYPE, and operands as
4925 specified. Expressions and reference nodes can be created this way.
4926 Constants, decls, types and misc nodes cannot be.
4927
4928 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4929 enough for all extant tree codes. */
4930
4931 tree
4932 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4933 {
4934 tree t;
4935
4936 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4937
4938 t = make_node (code PASS_MEM_STAT);
4939 TREE_TYPE (t) = tt;
4940
4941 return t;
4942 }
4943
/* Build a one-operand expression node of code CODE and type TYPE with
   operand NODE, deriving TREE_SIDE_EFFECTS, TREE_READONLY, TREE_CONSTANT
   and TREE_THIS_VOLATILE from the operand as appropriate for CODE.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part needs clearing; everything else is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
5007
/* Helper for build[2-5]: store ARG##N as operand N of the node T being
   built, and fold its flags into the caller's local accumulators
   SIDE_EFFECTS, READ_ONLY and CONSTANT (types contribute nothing).  */
#define PROCESS_ARG(N)			\
  do {					\
    TREE_OPERAND (t, N) = arg##N;	\
    if (arg##N &&!TYPE_P (arg##N))	\
      {					\
	if (TREE_SIDE_EFFECTS (arg##N))	\
	  side_effects = 1;		\
	if (!TREE_READONLY (arg##N)	\
	    && !CONSTANT_CLASS_P (arg##N)) \
	  (void) (read_only = 0);	\
	if (!TREE_CONSTANT (arg##N))	\
	  (void) (constant = 0);	\
      }					\
  } while (0)
5022
/* Build a two-operand expression node of code CODE and type TT with
   operands ARG0 and ARG1, deriving the result's flags from the operands
   via PROCESS_ARG.  Division/modulo by a literal zero is never marked
   TREE_CONSTANT, and MEM_REF gets its readonly/volatile flags from the
   object behind an ADDR_EXPR operand instead.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
5101
5102
/* Build a three-operand expression node of code CODE and type TT with
   operands ARG0, ARG1 and ARG2.  CONSTANT is accumulated by PROCESS_ARG
   but never consulted here; READ_ONLY is consulted only for COND_EXPR.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5143
/* Build a four-operand expression node of code CODE and type TT.
   CONSTANT and READ_ONLY are written by PROCESS_ARG but not consulted
   for these codes; only side effects and volatility are propagated.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5170
/* Build a five-operand expression node of code CODE and type TT.
   CONSTANT and READ_ONLY are written by PROCESS_ARG but not consulted
   here, except that TARGET_MEM_REF gets its readonly/volatile flags
   from the object behind an ADDR_EXPR operand.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5208
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR, at source location LOC.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold an inner MEM_REF's constant offset into ours and use
	     its pointer operand as the new base.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
5240
5241 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5242
5243 poly_offset_int
5244 mem_ref_offset (const_tree t)
5245 {
5246 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5247 SIGNED);
5248 }
5249
5250 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5251 offsetted by OFFSET units. */
5252
5253 tree
5254 build_invariant_address (tree type, tree base, poly_int64 offset)
5255 {
5256 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5257 build_fold_addr_expr (base),
5258 build_int_cst (ptr_type_node, offset));
5259 tree addr = build1 (ADDR_EXPR, type, ref);
5260 recompute_tree_invariant_for_addr_expr (addr);
5261 return addr;
5262 }
5263
5264 /* Similar except don't specify the TREE_TYPE
5265 and leave the TREE_SIDE_EFFECTS as 0.
5266 It is permissible for arguments to be null,
5267 or even garbage if their values do not matter. */
5268
5269 tree
5270 build_nt (enum tree_code code, ...)
5271 {
5272 tree t;
5273 int length;
5274 int i;
5275 va_list p;
5276
5277 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5278
5279 va_start (p, code);
5280
5281 t = make_node (code);
5282 length = TREE_CODE_LENGTH (code);
5283
5284 for (i = 0; i < length; i++)
5285 TREE_OPERAND (t, i) = va_arg (p, tree);
5286
5287 va_end (p);
5288 return t;
5289 }
5290
5291 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5292 tree vec. */
5293
5294 tree
5295 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5296 {
5297 tree ret, t;
5298 unsigned int ix;
5299
5300 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5301 CALL_EXPR_FN (ret) = fn;
5302 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5303 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5304 CALL_EXPR_ARG (ret, ix) = t;
5305 return ret;
5306 }
5307 \f
5308 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5309 and data type TYPE.
5310 We do NOT enter this node in any sort of symbol table.
5311
5312 LOC is the location of the decl.
5313
5314 layout_decl is used to set up the decl's storage layout.
5315 Other slots are initialized to 0 or null pointers. */
5316
5317 tree
5318 build_decl (location_t loc, enum tree_code code, tree name,
5319 tree type MEM_STAT_DECL)
5320 {
5321 tree t;
5322
5323 t = make_node (code PASS_MEM_STAT);
5324 DECL_SOURCE_LOCATION (t) = loc;
5325
5326 /* if (type == error_mark_node)
5327 type = integer_type_node; */
5328 /* That is not done, deliberately, so that having error_mark_node
5329 as the type can suppress useless errors in the use of this variable. */
5330
5331 DECL_NAME (t) = name;
5332 TREE_TYPE (t) = type;
5333
5334 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5335 layout_decl (t, 0);
5336
5337 return t;
5338 }
5339
5340 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5341
5342 tree
5343 build_debug_expr_decl (tree type)
5344 {
5345 tree vexpr = make_node (DEBUG_EXPR_DECL);
5346 DECL_ARTIFICIAL (vexpr) = 1;
5347 TREE_TYPE (vexpr) = type;
5348 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5349 return vexpr;
5350 }
5351
5352 /* Builds and returns function declaration with NAME and TYPE. */
5353
5354 tree
5355 build_fn_decl (const char *name, tree type)
5356 {
5357 tree id = get_identifier (name);
5358 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5359
5360 DECL_EXTERNAL (decl) = 1;
5361 TREE_PUBLIC (decl) = 1;
5362 DECL_ARTIFICIAL (decl) = 1;
5363 TREE_NOTHROW (decl) = 1;
5364
5365 return decl;
5366 }
5367
/* Vector of every TRANSLATION_UNIT_DECL built so far; populated by
   build_translation_unit_decl.  */
vec<tree, va_gc> *all_translation_units;
5369
5370 /* Builds a new translation-unit decl with name NAME, queues it in the
5371 global list of translation-unit decls and returns it. */
5372
5373 tree
5374 build_translation_unit_decl (tree name)
5375 {
5376 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5377 name, NULL_TREE);
5378 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5379 vec_safe_push (all_translation_units, tu);
5380 return tu;
5381 }
5382
5383 \f
5384 /* BLOCK nodes are used to represent the structure of binding contours
5385 and declarations, once those contours have been exited and their contents
5386 compiled. This information is used for outputting debugging info. */
5387
5388 tree
5389 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5390 {
5391 tree block = make_node (BLOCK);
5392
5393 BLOCK_VARS (block) = vars;
5394 BLOCK_SUBBLOCKS (block) = subblocks;
5395 BLOCK_SUPERCONTEXT (block) = supercontext;
5396 BLOCK_CHAIN (block) = chain;
5397 return block;
5398 }
5399
5400 \f
5401 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5402
5403 LOC is the location to use in tree T. */
5404
5405 void
5406 protected_set_expr_location (tree t, location_t loc)
5407 {
5408 if (CAN_HAVE_LOCATION_P (t))
5409 SET_EXPR_LOCATION (t, loc);
5410 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5411 {
5412 t = expr_single (t);
5413 if (t && CAN_HAVE_LOCATION_P (t))
5414 SET_EXPR_LOCATION (t, loc);
5415 }
5416 }
5417
5418 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5419 UNKNOWN_LOCATION. */
5420
5421 void
5422 protected_set_expr_location_if_unset (tree t, location_t loc)
5423 {
5424 t = expr_single (t);
5425 if (t && !EXPR_HAS_LOCATION (t))
5426 protected_set_expr_location (t, loc);
5427 }
5428 \f
5429 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5430 of the various TYPE_QUAL values. */
5431
5432 static void
5433 set_type_quals (tree type, int type_quals)
5434 {
5435 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5436 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5437 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5438 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5439 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5440 }
5441
5442 /* Returns true iff CAND and BASE have equivalent language-specific
5443 qualifiers. */
5444
5445 bool
5446 check_lang_type (const_tree cand, const_tree base)
5447 {
5448 if (lang_hooks.types.type_hash_eq == NULL)
5449 return true;
5450 /* type_hash_eq currently only applies to these types. */
5451 if (TREE_CODE (cand) != FUNCTION_TYPE
5452 && TREE_CODE (cand) != METHOD_TYPE)
5453 return true;
5454 return lang_hooks.types.type_hash_eq (cand, base);
5455 }
5456
5457 /* This function checks to see if TYPE matches the size one of the built-in
5458 atomic types, and returns that core atomic type. */
5459
5460 static tree
5461 find_atomic_core_type (const_tree type)
5462 {
5463 tree base_atomic_type;
5464
5465 /* Only handle complete types. */
5466 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5467 return NULL_TREE;
5468
5469 switch (tree_to_uhwi (TYPE_SIZE (type)))
5470 {
5471 case 8:
5472 base_atomic_type = atomicQI_type_node;
5473 break;
5474
5475 case 16:
5476 base_atomic_type = atomicHI_type_node;
5477 break;
5478
5479 case 32:
5480 base_atomic_type = atomicSI_type_node;
5481 break;
5482
5483 case 64:
5484 base_atomic_type = atomicDI_type_node;
5485 break;
5486
5487 case 128:
5488 base_atomic_type = atomicTI_type_node;
5489 break;
5490
5491 default:
5492 base_atomic_type = NULL_TREE;
5493 }
5494
5495 return base_atomic_type;
5496 }
5497
/* Returns true iff unqualified CAND and BASE are equivalent.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
				TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type; if so, an
	 alignment raised to the atomic type's alignment still counts
	 as equivalent.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
	return true;
    }
  return false;
}
5524
5525 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5526
5527 bool
5528 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5529 {
5530 return (TYPE_QUALS (cand) == type_quals
5531 && check_base_type (cand, base)
5532 && check_lang_type (cand, base));
5533 }
5534
5535 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5536
5537 static bool
5538 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5539 {
5540 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5541 && TYPE_NAME (cand) == TYPE_NAME (base)
5542 /* Apparently this is needed for Objective-C. */
5543 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5544 /* Check alignment. */
5545 && TYPE_ALIGN (cand) == align
5546 /* Check this is a user-aligned type as build_aligned_type
5547 would create. */
5548 && TYPE_USER_ALIGN (cand)
5549 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5550 TYPE_ATTRIBUTES (base))
5551 && check_lang_type (cand, base));
5552 }
5553
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  /* TYPE itself may already carry exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	/* Unlink T from its current position ...  */
	*tp = TYPE_NEXT_VARIANT (t);
	/* ... and re-link it immediately after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
5586
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality.  */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE.  */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type.  */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
5635
5636 /* Create a variant of type T with alignment ALIGN. */
5637
5638 tree
5639 build_aligned_type (tree type, unsigned int align)
5640 {
5641 tree t;
5642
5643 if (TYPE_PACKED (type)
5644 || TYPE_ALIGN (type) == align)
5645 return type;
5646
5647 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5648 if (check_aligned_type (t, type, align))
5649 return t;
5650
5651 t = build_variant_type_copy (type);
5652 SET_TYPE_ALIGN (t, align);
5653 TYPE_USER_ALIGN (t) = 1;
5654
5655 return t;
5656 }
5657
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself.  */

tree
build_distinct_type_copy (tree type MEM_STAT_DECL)
{
  tree t = copy_node (type PASS_MEM_STAT);

  /* The copy must not share TYPE's pointer/reference caches.  */
  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
5688
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE, right after
     the main variant M.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
5715 \f
5716 /* Return true if the from tree in both tree maps are equal. */
5717
5718 int
5719 tree_map_base_eq (const void *va, const void *vb)
5720 {
5721 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5722 *const b = (const struct tree_map_base *) vb;
5723 return (a->from == b->from);
5724 }
5725
5726 /* Hash a from tree in a tree_base_map. */
5727
5728 unsigned int
5729 tree_map_base_hash (const void *item)
5730 {
5731 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5732 }
5733
5734 /* Return true if this tree map structure is marked for garbage collection
5735 purposes. We simply return true if the from tree is marked, so that this
5736 structure goes away when the from tree goes away. */
5737
5738 int
5739 tree_map_base_marked_p (const void *p)
5740 {
5741 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5742 }
5743
5744 /* Hash a from tree in a tree_map. */
5745
5746 unsigned int
5747 tree_map_hash (const void *item)
5748 {
5749 return (((const struct tree_map *) item)->hash);
5750 }
5751
5752 /* Hash a from tree in a tree_decl_map. */
5753
5754 unsigned int
5755 tree_decl_map_hash (const void *item)
5756 {
5757 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5758 }
5759
5760 /* Return the initialization priority for DECL. */
5761
5762 priority_type
5763 decl_init_priority_lookup (tree decl)
5764 {
5765 symtab_node *snode = symtab_node::get (decl);
5766
5767 if (!snode)
5768 return DEFAULT_INIT_PRIORITY;
5769 return
5770 snode->get_init_priority ();
5771 }
5772
5773 /* Return the finalization priority for DECL. */
5774
5775 priority_type
5776 decl_fini_priority_lookup (tree decl)
5777 {
5778 cgraph_node *node = cgraph_node::get (decl);
5779
5780 if (!node)
5781 return DEFAULT_INIT_PRIORITY;
5782 return
5783 node->get_fini_priority ();
5784 }
5785
/* Set the initialization priority for DECL to PRIORITY.  */

void
decl_init_priority_insert (tree decl, priority_type priority)
{
  struct symtab_node *snode;

  if (priority == DEFAULT_INIT_PRIORITY)
    {
      /* Storing the default: only update an already-existing node;
	 don't create one just to record the default value.  */
      snode = symtab_node::get (decl);
      if (!snode)
	return;
    }
  else if (VAR_P (decl))
    snode = varpool_node::get_create (decl);
  else
    snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);
}
5805
/* Set the finalization priority for DECL to PRIORITY.  */

void
decl_fini_priority_insert (tree decl, priority_type priority)
{
  struct cgraph_node *node;

  if (priority == DEFAULT_INIT_PRIORITY)
    {
      /* Storing the default: only update an already-existing node;
	 don't create one just to record the default value.  */
      node = cgraph_node::get (decl);
      if (!node)
	return;
    }
  else
    node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
}
5823
/* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */

static void
print_debug_expr_statistics (void)
{
  /* Diagnostics go to stderr like the other stats dumps in this file.  */
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
}
5834
/* Print out the statistics for the DECL_VALUE_EXPR hash table.  */

static void
print_value_expr_statistics (void)
{
  /* Diagnostics go to stderr like the other stats dumps in this file.  */
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
}
5845
5846 /* Lookup a debug expression for FROM, and return it if we find one. */
5847
5848 tree
5849 decl_debug_expr_lookup (tree from)
5850 {
5851 struct tree_decl_map *h, in;
5852 in.base.from = from;
5853
5854 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5855 if (h)
5856 return h->to;
5857 return NULL_TREE;
5858 }
5859
5860 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5861
5862 void
5863 decl_debug_expr_insert (tree from, tree to)
5864 {
5865 struct tree_decl_map *h;
5866
5867 h = ggc_alloc<tree_decl_map> ();
5868 h->base.from = from;
5869 h->to = to;
5870 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5871 }
5872
5873 /* Lookup a value expression for FROM, and return it if we find one. */
5874
5875 tree
5876 decl_value_expr_lookup (tree from)
5877 {
5878 struct tree_decl_map *h, in;
5879 in.base.from = from;
5880
5881 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5882 if (h)
5883 return h->to;
5884 return NULL_TREE;
5885 }
5886
/* Insert a mapping FROM->TO in the value expression hashtable.  */

void
decl_value_expr_insert (tree from, tree to)
{
  struct tree_decl_map *h;

  /* Uses of FROM shouldn't look like they happen at the location of TO.  */
  to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);

  /* Allocate the entry in GC memory and store it into the slot for
     FROM's UID, replacing any previous mapping.  */
  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  h->to = to;
  *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}
5902
/* Lookup a vector of debug arguments for FROM, and return it if we
   find one.  */

vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
{
  struct tree_vec_map *h, in;

  /* The flag is authoritative: no flag means no table entry, and it
     lets us skip the hash lookup entirely.  */
  if (!DECL_HAS_DEBUG_ARGS_P (from))
    return NULL;
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return &h->to;
  return NULL;
}
5920
/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  /* If FROM already has debug args, return the existing vector.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  /* The table is created lazily on first insertion.  */
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  /* Record that FROM now has an entry so lookups know to search.  */
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}
5942
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  NOTE: everything hashed here
   must also be compared by type_cache_hasher::equal, and vice versa,
   or equal types could land in different buckets.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Hash the argument types; error_mark_node entries are skipped
	 so erroneous signatures do not perturb the hash.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE only participates for non-aggregate
	   element types; see the matching logic in
	   type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bound that exists; min suffices when max is absent.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
6018
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  NOTE: this must stay in
   sync with type_hash_canon_hash -- anything compared here should be
   hashed there, and vice versa.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* These are fully determined by the checks above (code, TREE_TYPE,
	 attributes, mode/align).  */
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* TYPE_VALUES must match either by identity or element-wise.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds compare equal if identical nodes or numerically equal
	 constants (tree_int_cst_equal also accepts two NULLs).  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	   && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
	       == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* METHOD_TYPE and FUNCTION_TYPE reach here via `break'; give the
     front end a final say on equivalence.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
6144
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.

   NOTE: if an existing type is found, TYPE itself is freed here, so
   the caller must not use TYPE after a call that returns a different
   node.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists: discard TYPE and return it.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* If TYPE was the most recently created type, recycle its UID.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No match: record TYPE as the canonical representative.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
6216
/* Print out the statistics for the type hash table to stderr.  */

static void
print_type_hash_statistics (void)
{
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
}
6225
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

bool
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    /* Values must be pointer-identical; purposes may instead match as
       structurally-equal constants of the same type.  */
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return false;

  /* Equal only if both lists were exhausted together.  */
  return t1 == t2;
}
6246
6247 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6248 given by TYPE. If the argument list accepts variable arguments,
6249 then this function counts only the ordinary arguments. */
6250
6251 int
6252 type_num_arguments (const_tree fntype)
6253 {
6254 int i = 0;
6255
6256 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6257 /* If the function does not take a variable number of arguments,
6258 the last element in the list will have type `void'. */
6259 if (VOID_TYPE_P (TREE_VALUE (t)))
6260 break;
6261 else
6262 ++i;
6263
6264 return i;
6265 }
6266
/* Return the type of the function TYPE's argument ARGNO if known.
   For vararg function's where ARGNO refers to one of the variadic
   arguments return null.  Otherwise, return a void_type_node for
   out-of-bounds ARGNO.  */

tree
type_argument_type (const_tree fntype, unsigned argno)
{
  /* Treat zero the same as an out-of-bounds argument number.  */
  if (!argno)
    return void_type_node;

  function_args_iterator iter;

  tree argtype;
  unsigned i = 1;		/* Argument numbers are 1-based.  */
  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
    {
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (!argtype)
	break;

      if (i == argno || VOID_TYPE_P (argtype))
	return argtype;

      ++i;
    }

  return NULL_TREE;
}
6300
6301 /* Nonzero if integer constants T1 and T2
6302 represent the same constant value. */
6303
6304 int
6305 tree_int_cst_equal (const_tree t1, const_tree t2)
6306 {
6307 if (t1 == t2)
6308 return 1;
6309
6310 if (t1 == 0 || t2 == 0)
6311 return 0;
6312
6313 STRIP_ANY_LOCATION_WRAPPER (t1);
6314 STRIP_ANY_LOCATION_WRAPPER (t2);
6315
6316 if (TREE_CODE (t1) == INTEGER_CST
6317 && TREE_CODE (t2) == INTEGER_CST
6318 && wi::to_widest (t1) == wi::to_widest (t2))
6319 return 1;
6320
6321 return 0;
6322 }
6323
6324 /* Return true if T is an INTEGER_CST whose numerical value (extended
6325 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6326
6327 bool
6328 tree_fits_shwi_p (const_tree t)
6329 {
6330 return (t != NULL_TREE
6331 && TREE_CODE (t) == INTEGER_CST
6332 && wi::fits_shwi_p (wi::to_widest (t)));
6333 }
6334
6335 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6336 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6337
6338 bool
6339 tree_fits_poly_int64_p (const_tree t)
6340 {
6341 if (t == NULL_TREE)
6342 return false;
6343 if (POLY_INT_CST_P (t))
6344 {
6345 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6346 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6347 return false;
6348 return true;
6349 }
6350 return (TREE_CODE (t) == INTEGER_CST
6351 && wi::fits_shwi_p (wi::to_widest (t)));
6352 }
6353
6354 /* Return true if T is an INTEGER_CST whose numerical value (extended
6355 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6356
6357 bool
6358 tree_fits_uhwi_p (const_tree t)
6359 {
6360 return (t != NULL_TREE
6361 && TREE_CODE (t) == INTEGER_CST
6362 && wi::fits_uhwi_p (wi::to_widest (t)));
6363 }
6364
6365 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6366 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6367
6368 bool
6369 tree_fits_poly_uint64_p (const_tree t)
6370 {
6371 if (t == NULL_TREE)
6372 return false;
6373 if (POLY_INT_CST_P (t))
6374 {
6375 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6376 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6377 return false;
6378 return true;
6379 }
6380 return (TREE_CODE (t) == INTEGER_CST
6381 && wi::fits_uhwi_p (wi::to_widest (t)));
6382 }
6383
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  /* The fit check guarantees the low word holds the whole value.  */
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6394
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  /* The fit check guarantees the low word holds the whole value.  */
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6405
6406 /* Return the most significant (sign) bit of T. */
6407
6408 int
6409 tree_int_cst_sign_bit (const_tree t)
6410 {
6411 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6412
6413 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6414 }
6415
6416 /* Return an indication of the sign of the integer constant T.
6417 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6418 Note that -1 will never be returned if T's type is unsigned. */
6419
6420 int
6421 tree_int_cst_sgn (const_tree t)
6422 {
6423 if (wi::to_wide (t) == 0)
6424 return 0;
6425 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6426 return 1;
6427 else if (wi::neg_p (wi::to_wide (t)))
6428 return -1;
6429 else
6430 return 1;
6431 }
6432
/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type; SGN says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether SGN is SIGNED or UNSIGNED.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
}
6457
6458 /* Return truthvalue of whether T1 is the same tree structure as T2.
6459 Return 1 if they are the same.
6460 Return 0 if they are understandably different.
6461 Return -1 if either contains tree structure not understood by
6462 this function. */
6463
6464 int
6465 simple_cst_equal (const_tree t1, const_tree t2)
6466 {
6467 enum tree_code code1, code2;
6468 int cmp;
6469 int i;
6470
6471 if (t1 == t2)
6472 return 1;
6473 if (t1 == 0 || t2 == 0)
6474 return 0;
6475
6476 /* For location wrappers to be the same, they must be at the same
6477 source location (and wrap the same thing). */
6478 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6479 {
6480 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6481 return 0;
6482 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6483 }
6484
6485 code1 = TREE_CODE (t1);
6486 code2 = TREE_CODE (t2);
6487
6488 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6489 {
6490 if (CONVERT_EXPR_CODE_P (code2)
6491 || code2 == NON_LVALUE_EXPR)
6492 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6493 else
6494 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6495 }
6496
6497 else if (CONVERT_EXPR_CODE_P (code2)
6498 || code2 == NON_LVALUE_EXPR)
6499 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6500
6501 if (code1 != code2)
6502 return 0;
6503
6504 switch (code1)
6505 {
6506 case INTEGER_CST:
6507 return wi::to_widest (t1) == wi::to_widest (t2);
6508
6509 case REAL_CST:
6510 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6511
6512 case FIXED_CST:
6513 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6514
6515 case STRING_CST:
6516 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6517 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6518 TREE_STRING_LENGTH (t1)));
6519
6520 case CONSTRUCTOR:
6521 {
6522 unsigned HOST_WIDE_INT idx;
6523 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6524 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6525
6526 if (vec_safe_length (v1) != vec_safe_length (v2))
6527 return false;
6528
6529 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6530 /* ??? Should we handle also fields here? */
6531 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6532 return false;
6533 return true;
6534 }
6535
6536 case SAVE_EXPR:
6537 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6538
6539 case CALL_EXPR:
6540 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6541 if (cmp <= 0)
6542 return cmp;
6543 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6544 return 0;
6545 {
6546 const_tree arg1, arg2;
6547 const_call_expr_arg_iterator iter1, iter2;
6548 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6549 arg2 = first_const_call_expr_arg (t2, &iter2);
6550 arg1 && arg2;
6551 arg1 = next_const_call_expr_arg (&iter1),
6552 arg2 = next_const_call_expr_arg (&iter2))
6553 {
6554 cmp = simple_cst_equal (arg1, arg2);
6555 if (cmp <= 0)
6556 return cmp;
6557 }
6558 return arg1 == arg2;
6559 }
6560
6561 case TARGET_EXPR:
6562 /* Special case: if either target is an unallocated VAR_DECL,
6563 it means that it's going to be unified with whatever the
6564 TARGET_EXPR is really supposed to initialize, so treat it
6565 as being equivalent to anything. */
6566 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6567 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6568 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6569 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6570 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6571 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6572 cmp = 1;
6573 else
6574 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6575
6576 if (cmp <= 0)
6577 return cmp;
6578
6579 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6580
6581 case WITH_CLEANUP_EXPR:
6582 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6583 if (cmp <= 0)
6584 return cmp;
6585
6586 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6587
6588 case COMPONENT_REF:
6589 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6590 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6591
6592 return 0;
6593
6594 case VAR_DECL:
6595 case PARM_DECL:
6596 case CONST_DECL:
6597 case FUNCTION_DECL:
6598 return 0;
6599
6600 default:
6601 if (POLY_INT_CST_P (t1))
6602 /* A false return means maybe_ne rather than known_ne. */
6603 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6604 TYPE_SIGN (TREE_TYPE (t1))),
6605 poly_widest_int::from (poly_int_cst_value (t2),
6606 TYPE_SIGN (TREE_TYPE (t2))));
6607 break;
6608 }
6609
6610 /* This general rule works for most tree codes. All exceptions should be
6611 handled above. If this is a language-specific tree code, we can't
6612 trust what might be in the operand, so say we don't know
6613 the situation. */
6614 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6615 return -1;
6616
6617 switch (TREE_CODE_CLASS (code1))
6618 {
6619 case tcc_unary:
6620 case tcc_binary:
6621 case tcc_comparison:
6622 case tcc_expression:
6623 case tcc_reference:
6624 case tcc_statement:
6625 cmp = 1;
6626 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6627 {
6628 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6629 if (cmp <= 0)
6630 return cmp;
6631 }
6632
6633 return cmp;
6634
6635 default:
6636 return -1;
6637 }
6638 }
6639
6640 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6641 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6642 than U, respectively. */
6643
6644 int
6645 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6646 {
6647 if (tree_int_cst_sgn (t) < 0)
6648 return -1;
6649 else if (!tree_fits_uhwi_p (t))
6650 return 1;
6651 else if (TREE_INT_CST_LOW (t) == u)
6652 return 0;
6653 else if (TREE_INT_CST_LOW (t) < u)
6654 return -1;
6655 else
6656 return 1;
6657 }
6658
6659 /* Return true if SIZE represents a constant size that is in bounds of
6660 what the middle-end and the backend accepts (covering not more than
6661 half of the address-space).
6662 When PERR is non-null, set *PERR on failure to the description of
6663 why SIZE is not valid. */
6664
6665 bool
6666 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6667 {
6668 if (POLY_INT_CST_P (size))
6669 {
6670 if (TREE_OVERFLOW (size))
6671 return false;
6672 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6673 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6674 return false;
6675 return true;
6676 }
6677
6678 cst_size_error error;
6679 if (!perr)
6680 perr = &error;
6681
6682 if (TREE_CODE (size) != INTEGER_CST)
6683 {
6684 *perr = cst_size_not_constant;
6685 return false;
6686 }
6687
6688 if (TREE_OVERFLOW_P (size))
6689 {
6690 *perr = cst_size_overflow;
6691 return false;
6692 }
6693
6694 if (tree_int_cst_sgn (size) < 0)
6695 {
6696 *perr = cst_size_negative;
6697 return false;
6698 }
6699 if (!tree_fits_uhwi_p (size)
6700 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6701 < wi::to_widest (size) * 2))
6702 {
6703 *perr = cst_size_too_big;
6704 return false;
6705 }
6706
6707 return true;
6708 }
6709
6710 /* Return the precision of the type, or for a complex or vector type the
6711 precision of the type of its elements. */
6712
6713 unsigned int
6714 element_precision (const_tree type)
6715 {
6716 if (!TYPE_P (type))
6717 type = TREE_TYPE (type);
6718 enum tree_code code = TREE_CODE (type);
6719 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6720 type = TREE_TYPE (type);
6721
6722 return TYPE_PRECISION (type);
6723 }
6724
6725 /* Return true if CODE represents an associative tree code. Otherwise
6726 return false. */
6727 bool
6728 associative_tree_code (enum tree_code code)
6729 {
6730 switch (code)
6731 {
6732 case BIT_IOR_EXPR:
6733 case BIT_AND_EXPR:
6734 case BIT_XOR_EXPR:
6735 case PLUS_EXPR:
6736 case MULT_EXPR:
6737 case MIN_EXPR:
6738 case MAX_EXPR:
6739 return true;
6740
6741 default:
6742 break;
6743 }
6744 return false;
6745 }
6746
6747 /* Return true if CODE represents a commutative tree code. Otherwise
6748 return false. */
6749 bool
6750 commutative_tree_code (enum tree_code code)
6751 {
6752 switch (code)
6753 {
6754 case PLUS_EXPR:
6755 case MULT_EXPR:
6756 case MULT_HIGHPART_EXPR:
6757 case MIN_EXPR:
6758 case MAX_EXPR:
6759 case BIT_IOR_EXPR:
6760 case BIT_XOR_EXPR:
6761 case BIT_AND_EXPR:
6762 case NE_EXPR:
6763 case EQ_EXPR:
6764 case UNORDERED_EXPR:
6765 case ORDERED_EXPR:
6766 case UNEQ_EXPR:
6767 case LTGT_EXPR:
6768 case TRUTH_AND_EXPR:
6769 case TRUTH_XOR_EXPR:
6770 case TRUTH_OR_EXPR:
6771 case WIDEN_MULT_EXPR:
6772 case VEC_WIDEN_MULT_HI_EXPR:
6773 case VEC_WIDEN_MULT_LO_EXPR:
6774 case VEC_WIDEN_MULT_EVEN_EXPR:
6775 case VEC_WIDEN_MULT_ODD_EXPR:
6776 return true;
6777
6778 default:
6779 break;
6780 }
6781 return false;
6782 }
6783
6784 /* Return true if CODE represents a ternary tree code for which the
6785 first two operands are commutative. Otherwise return false. */
6786 bool
6787 commutative_ternary_tree_code (enum tree_code code)
6788 {
6789 switch (code)
6790 {
6791 case WIDEN_MULT_PLUS_EXPR:
6792 case WIDEN_MULT_MINUS_EXPR:
6793 case DOT_PROD_EXPR:
6794 return true;
6795
6796 default:
6797 break;
6798 }
6799 return false;
6800 }
6801
6802 /* Returns true if CODE can overflow. */
6803
6804 bool
6805 operation_can_overflow (enum tree_code code)
6806 {
6807 switch (code)
6808 {
6809 case PLUS_EXPR:
6810 case MINUS_EXPR:
6811 case MULT_EXPR:
6812 case LSHIFT_EXPR:
6813 /* Can overflow in various ways. */
6814 return true;
6815 case TRUNC_DIV_EXPR:
6816 case EXACT_DIV_EXPR:
6817 case FLOOR_DIV_EXPR:
6818 case CEIL_DIV_EXPR:
6819 /* For INT_MIN / -1. */
6820 return true;
6821 case NEGATE_EXPR:
6822 case ABS_EXPR:
6823 /* For -INT_MIN. */
6824 return true;
6825 default:
6826 /* These operators cannot overflow. */
6827 return false;
6828 }
6829 }
6830
6831 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6832 ftrapv doesn't generate trapping insns for CODE. */
6833
6834 bool
6835 operation_no_trapping_overflow (tree type, enum tree_code code)
6836 {
6837 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6838
6839 /* We don't generate instructions that trap on overflow for complex or vector
6840 types. */
6841 if (!INTEGRAL_TYPE_P (type))
6842 return true;
6843
6844 if (!TYPE_OVERFLOW_TRAPS (type))
6845 return true;
6846
6847 switch (code)
6848 {
6849 case PLUS_EXPR:
6850 case MINUS_EXPR:
6851 case MULT_EXPR:
6852 case NEGATE_EXPR:
6853 case ABS_EXPR:
6854 /* These operators can overflow, and -ftrapv generates trapping code for
6855 these. */
6856 return false;
6857 case TRUNC_DIV_EXPR:
6858 case EXACT_DIV_EXPR:
6859 case FLOOR_DIV_EXPR:
6860 case CEIL_DIV_EXPR:
6861 case LSHIFT_EXPR:
6862 /* These operators can overflow, but -ftrapv does not generate trapping
6863 code for these. */
6864 return true;
6865 default:
6866 /* These operators cannot overflow. */
6867 return true;
6868 }
6869 }
6870
6871 /* Constructors for pointer, array and function types.
6872 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6873 constructed by language-dependent code, not here.) */
6874
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If MODE is VOIDmode, a pointer mode for the address
   space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
   indicate this type can reference all of memory. If such a type has
   already been constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's request before the may_alias attribute can
     force CAN_ALIAS_ALL to true below; the canonical-type decision
     further down keys off this original value.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      /* Pick the pointer mode matching TO_TYPE's address space.  */
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Existing pointer types are chained
     through TYPE_NEXT_PTR_TO.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Push the new type onto TO_TYPE's chain of pointer types.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical pointer type points to the canonical pointee and
       never has ref-all set.  */
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
6942
6943 /* By default build pointers in ptr_mode. */
6944
6945 tree
6946 build_pointer_type (tree to_type)
6947 {
6948 return build_pointer_type_for_mode (to_type, VOIDmode, false);
6949 }
6950
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's request before the may_alias attribute can
     force CAN_ALIAS_ALL to true below; the canonical-type decision
     further down keys off this original value.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      /* Pick the pointer mode matching TO_TYPE's address space.  */
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Existing reference types are chained
     through TYPE_NEXT_REF_TO.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Push the new type onto TO_TYPE's chain of reference types.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical reference type refers to the canonical pointee
       and never has ref-all set.  */
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
7012
7013
7014 /* Build the node for the type of references-to-TO_TYPE by default
7015 in ptr_mode. */
7016
7017 tree
7018 build_reference_type (tree to_type)
7019 {
7020 return build_reference_type_for_mode (to_type, VOIDmode, false);
7021 }
7022
7023 #define MAX_INT_CACHED_PREC \
7024 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7025 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7026
7027 static void
7028 clear_nonstandard_integer_type_cache (void)
7029 {
7030 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7031 {
7032 nonstandard_integer_type_cache[i] = NULL;
7033 }
7034 }
7035
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache stores signed types at indices [0, MAX_INT_CACHED_PREC]
     and unsigned types at [MAX_INT_CACHED_PREC + 1,
     2 * MAX_INT_CACHED_PREC + 1].  Turning UNSIGNEDP into the offset
     of the unsigned half lets PRECISION + UNSIGNEDP index both halves
     uniformly.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set min/max values, size and mode from the precision.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Merge with a previously built identical type, if any, keyed on
     the max value (which encodes precision and signedness).  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
7071
7072 #define MAX_BOOL_CACHED_PREC \
7073 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7074 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7075
7076 /* Builds a boolean type of precision PRECISION.
7077 Used for boolean vectors to choose proper vector element size. */
7078 tree
7079 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7080 {
7081 tree type;
7082
7083 if (precision <= MAX_BOOL_CACHED_PREC)
7084 {
7085 type = nonstandard_boolean_type_cache[precision];
7086 if (type)
7087 return type;
7088 }
7089
7090 type = make_node (BOOLEAN_TYPE);
7091 TYPE_PRECISION (type) = precision;
7092 fixup_signed_type (type);
7093
7094 if (precision <= MAX_INT_CACHED_PREC)
7095 nonstandard_boolean_type_cache[precision] = type;
7096
7097 return type;
7098 }
7099
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  /* HIGHVAL may be NULL, leaving the range without an upper bound.  */
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits precision, mode, size and alignment from
     its base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  /* Only constant bounds can be hashed for sharing.  */
  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Reuse an identical range type if one was built before.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
7141
7142 /* Wrapper around build_range_type_1 with SHARED set to true. */
7143
7144 tree
7145 build_range_type (tree type, tree lowval, tree highval)
7146 {
7147 return build_range_type_1 (type, lowval, highval, true);
7148 }
7149
7150 /* Wrapper around build_range_type_1 with SHARED set to false. */
7151
7152 tree
7153 build_nonshared_range_type (tree type, tree lowval, tree highval)
7154 {
7155 return build_range_type_1 (type, lowval, highval, false);
7156 }
7157
7158 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7159 MAXVAL should be the maximum value in the domain
7160 (one less than the length of the array).
7161
7162 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7163 We don't enforce this limit, that is up to caller (e.g. language front end).
7164 The limit exists because the result is a signed type and we don't handle
7165 sizes that use more than one HOST_WIDE_INT. */
7166
7167 tree
7168 build_index_type (tree maxval)
7169 {
7170 return build_range_type (sizetype, size_zero_node, maxval);
7171 }
7172
7173 /* Return true if the debug information for TYPE, a subtype, should be emitted
7174 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7175 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7176 debug info and doesn't reflect the source code. */
7177
7178 bool
7179 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7180 {
7181 tree base_type = TREE_TYPE (type), low, high;
7182
7183 /* Subrange types have a base type which is an integral type. */
7184 if (!INTEGRAL_TYPE_P (base_type))
7185 return false;
7186
7187 /* Get the real bounds of the subtype. */
7188 if (lang_hooks.types.get_subrange_bounds)
7189 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7190 else
7191 {
7192 low = TYPE_MIN_VALUE (type);
7193 high = TYPE_MAX_VALUE (type);
7194 }
7195
7196 /* If the type and its base type have the same representation and the same
7197 name, then the type is not a subrange but a copy of the base type. */
7198 if ((TREE_CODE (base_type) == INTEGER_TYPE
7199 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7200 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7201 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7202 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7203 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7204 return false;
7205
7206 if (lowval)
7207 *lowval = low;
7208 if (highval)
7209 *highval = high;
7210 return true;
7211 }
7212
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   INDEX_TYPE may be NULL_TREE; the recursive call below handles that case,
   which presumably corresponds to an array of unknown length -- TODO confirm.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      /* Diagnose and recover with a harmless element type.  */
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  if (shared)
    {
      /* Merge with an identical previously built array type, if any.  */
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only compute TYPE_CANONICAL when T is itself the canonical
     representative (type_hash_canon may have returned an older node
     that already has it).  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array type from the canonical element
	   and index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
7261
7262 /* Wrapper around build_array_type_1 with SHARED set to true. */
7263
7264 tree
7265 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7266 {
7267 return
7268 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7269 }
7270
7271 /* Wrapper around build_array_type_1 with SHARED set to false. */
7272
7273 tree
7274 build_nonshared_array_type (tree elt_type, tree index_type)
7275 {
7276 return build_array_type_1 (elt_type, index_type, false, false, true);
7277 }
7278
7279 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7280 sizetype. */
7281
7282 tree
7283 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7284 {
7285 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7286 }
7287
7288 /* Recursively examines the array elements of TYPE, until a non-array
7289 element type is found. */
7290
7291 tree
7292 strip_array_types (tree type)
7293 {
7294 while (TREE_CODE (type) == ARRAY_TYPE)
7295 type = TREE_TYPE (type);
7296
7297 return type;
7298 }
7299
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the list.  Stop early once a structural
     argument is seen, since the result is then ARGTYPES itself.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    /* Remember the void terminator; it is re-appended below
	       rather than copied.  */
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      /* The list was built back-to-front; restore source order.  */
      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
7371
7372 /* Construct, lay out and return
7373 the type of functions returning type VALUE_TYPE
7374 given arguments of types ARG_TYPES.
7375 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7376 are data type nodes for the arguments of the function.
7377 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7378 variable-arguments function with (...) prototype (no named arguments).
7379 If such a type has already been constructed, reuse it. */
7380
7381 tree
7382 build_function_type (tree value_type, tree arg_types,
7383 bool no_named_args_stdarg_p)
7384 {
7385 tree t;
7386 inchash::hash hstate;
7387 bool any_structural_p, any_noncanonical_p;
7388 tree canon_argtypes;
7389
7390 gcc_assert (arg_types != error_mark_node);
7391
7392 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7393 {
7394 error ("function return type cannot be function");
7395 value_type = integer_type_node;
7396 }
7397
7398 /* Make a node of the sort we want. */
7399 t = make_node (FUNCTION_TYPE);
7400 TREE_TYPE (t) = value_type;
7401 TYPE_ARG_TYPES (t) = arg_types;
7402 if (no_named_args_stdarg_p)
7403 {
7404 gcc_assert (arg_types == NULL_TREE);
7405 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7406 }
7407
7408 /* If we already have such a type, use the old one. */
7409 hashval_t hash = type_hash_canon_hash (t);
7410 t = type_hash_canon (hash, t);
7411
7412 /* Set up the canonical type. */
7413 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7414 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7415 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7416 &any_structural_p,
7417 &any_noncanonical_p);
7418 if (any_structural_p)
7419 SET_TYPE_STRUCTURAL_EQUALITY (t);
7420 else if (any_noncanonical_p)
7421 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7422 canon_argtypes);
7423
7424 if (!COMPLETE_TYPE_P (t))
7425 layout_type (t);
7426 return t;
7427 }
7428
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types into a TREE_LIST; the loop builds it in
     reverse order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Varargs: restore source order and leave the list unterminated
	 (no trailing void_list_node).  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      /* LAST is the head of the reversed-order list, i.e. the final
	 argument; the caller must not have passed the void terminator
	 explicitly.  */
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No named arguments: the list is just the void terminator.  */
    args = void_list_node;
  else
    {
      /* Restore source order and append the void terminator.  LAST
	 (the original head) is the tail after nreverse.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  /* VAARGS && ARGS == NULL_TREE is the prototyped (...) case with no
     named arguments.  */
  args = build_function_type (return_type, args, vaargs && args == NULL_TREE);

  return args;
}
7461
7462 /* Build a function type. The RETURN_TYPE is the type returned by the
7463 function. If additional arguments are provided, they are
7464 additional argument types. The list of argument types must always
7465 be terminated by NULL_TREE. */
7466
7467 tree
7468 build_function_type_list (tree return_type, ...)
7469 {
7470 tree args;
7471 va_list p;
7472
7473 va_start (p, return_type);
7474 args = build_function_type_list_1 (false, return_type, p);
7475 va_end (p);
7476 return args;
7477 }
7478
7479 /* Build a variable argument function type. The RETURN_TYPE is the
7480 type returned by the function. If additional arguments are provided,
7481 they are additional argument types. The list of argument types must
7482 always be terminated by NULL_TREE. */
7483
7484 tree
7485 build_varargs_function_type_list (tree return_type, ...)
7486 {
7487 tree args;
7488 va_list p;
7489
7490 va_start (p, return_type);
7491 args = build_function_type_list_1 (true, return_type, p);
7492 va_end (p);
7493
7494 return args;
7495 }
7496
7497 /* Build a function type. RETURN_TYPE is the type returned by the
7498 function; VAARGS indicates whether the function takes varargs. The
7499 function takes N named arguments, the types of which are provided in
7500 ARG_TYPES. */
7501
7502 static tree
7503 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7504 tree *arg_types)
7505 {
7506 int i;
7507 tree t = vaargs ? NULL_TREE : void_list_node;
7508
7509 for (i = n - 1; i >= 0; i--)
7510 t = tree_cons (NULL_TREE, arg_types[i], t);
7511
7512 return build_function_type (return_type, t, vaargs && n == 0);
7513 }
7514
7515 /* Build a function type. RETURN_TYPE is the type returned by the
7516 function. The function takes N named arguments, the types of which
7517 are provided in ARG_TYPES. */
7518
7519 tree
7520 build_function_type_array (tree return_type, int n, tree *arg_types)
7521 {
7522 return build_function_type_array_1 (false, return_type, n, arg_types);
7523 }
7524
7525 /* Build a variable argument function type. RETURN_TYPE is the type
7526 returned by the function. The function takes N named arguments, the
7527 types of which are provided in ARG_TYPES. */
7528
7529 tree
7530 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7531 {
7532 return build_function_type_array_1 (true, return_type, n, arg_types);
7533 }
7534
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the implicit "this" argument (TREE_CHAIN) when canonicalizing;
     the recursive call below re-adds it from the canonical basetype.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
7588
7589 /* Construct, lay out and return the type of methods belonging to class
7590 BASETYPE and whose arguments and values are described by TYPE.
7591 If that type exists already, reuse it.
7592 TYPE must be a FUNCTION_TYPE node. */
7593
7594 tree
7595 build_method_type (tree basetype, tree type)
7596 {
7597 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7598
7599 return build_method_type_directly (basetype,
7600 TREE_TYPE (type),
7601 TYPE_ARG_TYPES (type));
7602 }
7603
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute canonical info for a node that is itself canonical
     (i.e. freshly interned, not a reused variant).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      /* Note the basetype is canonicalized through its main variant,
	 matching the TYPE_OFFSET_BASETYPE assignment above.  */
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
7640
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Complex types are only defined over integral, real and
     fixed-point scalars.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  /* Hash on the unqualified component; qualifiers are re-applied to
     the result at the end.  */
  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Re-apply the qualifiers of the component to the complex type.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
7715
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
	  ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* _Float16/__bf16 may not exist on all targets; fall back to VOIDmode
     so the mode comparisons below simply never match.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode bfloat16_type_mode = (bfloat16_type_node
				     ? TYPE_MODE (bfloat16_type_node)
				     : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  /* Modes are compared, not type nodes, so typedef variants and
     same-mode types are all promoted identically.  */
  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex-of-real participates in excess precision.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
7829 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN tracks the innermost expression we have proved safe to use.  */
  tree win = op;

  /* Peel nested NOP_EXPR/CONVERT_EXPR wrappers from the outside in.  */
  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
7926 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow only the value operand (the rightmost one), then rebuild
	 the COMPOUND_EXPR chain around the narrowed value so its type
	 matches the narrowed result.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive BITSCHANGE: this conversion is an extension;
	 zero: a same-width nominal type change; negative: a truncation.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
8039 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

 retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more
     we can check ourselves here.  Look at the base type if we have one and
     it has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
8136
8137 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8138 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8139 represented (assuming two's-complement arithmetic) within the bit
8140 precision of the type are returned instead. */
8141
8142 void
8143 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8144 {
8145 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8146 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8147 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8148 else
8149 {
8150 if (TYPE_UNSIGNED (type))
8151 mpz_set_ui (min, 0);
8152 else
8153 {
8154 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8155 wi::to_mpz (mn, min, SIGNED);
8156 }
8157 }
8158
8159 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8160 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8161 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8162 else
8163 {
8164 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8165 wi::to_mpz (mn, max, TYPE_SIGN (type));
8166 }
8167 }
8168
8169 /* Return true if VAR is an automatic variable. */
8170
8171 bool
8172 auto_var_p (const_tree var)
8173 {
8174 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8175 || TREE_CODE (var) == PARM_DECL)
8176 && ! TREE_STATIC (var))
8177 || TREE_CODE (var) == RESULT_DECL);
8178 }
8179
8180 /* Return true if VAR is an automatic variable defined in function FN. */
8181
8182 bool
8183 auto_var_in_fn_p (const_tree var, const_tree fn)
8184 {
8185 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8186 && (auto_var_p (var)
8187 || TREE_CODE (var) == LABEL_DECL));
8188 }
8189
8190 /* Subprogram of following function. Called by walk_tree.
8191
8192 Return *TP if it is an automatic variable or parameter of the
8193 function passed in as DATA. */
8194
8195 static tree
8196 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8197 {
8198 tree fn = (tree) data;
8199
8200 if (TYPE_P (*tp))
8201 *walk_subtrees = 0;
8202
8203 else if (DECL_P (*tp)
8204 && auto_var_in_fn_p (*tp, fn))
8205 return *tp;
8206
8207 return NULL_TREE;
8208 }
8209
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  If TYPE isn't gimplified, return true also if
     gimplify_one_sizepos would gimplify the expression into a local
     variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly.
	 Use TREE_VISITED as a cycle guard around the recursion.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
8335
8336 /* Given a DECL or TYPE, return the scope in which it was declared, or
8337 NULL_TREE if there is no containing scope. */
8338
8339 tree
8340 get_containing_scope (const_tree t)
8341 {
8342 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8343 }
8344
8345 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8346
8347 const_tree
8348 get_ultimate_context (const_tree decl)
8349 {
8350 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8351 {
8352 if (TREE_CODE (decl) == BLOCK)
8353 decl = BLOCK_SUPERCONTEXT (decl);
8354 else
8355 decl = get_containing_scope (decl);
8356 }
8357 return decl;
8358 }
8359
8360 /* Return the innermost context enclosing DECL that is
8361 a FUNCTION_DECL, or zero if none. */
8362
8363 tree
8364 decl_function_context (const_tree decl)
8365 {
8366 tree context;
8367
8368 if (TREE_CODE (decl) == ERROR_MARK)
8369 return 0;
8370
8371 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8372 where we look up the function at runtime. Such functions always take
8373 a first argument of type 'pointer to real context'.
8374
8375 C++ should really be fixed to use DECL_CONTEXT for the real context,
8376 and use something else for the "virtual context". */
8377 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8378 context
8379 = TYPE_MAIN_VARIANT
8380 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8381 else
8382 context = DECL_CONTEXT (decl);
8383
8384 while (context && TREE_CODE (context) != FUNCTION_DECL)
8385 {
8386 if (TREE_CODE (context) == BLOCK)
8387 context = BLOCK_SUPERCONTEXT (context);
8388 else
8389 context = get_containing_scope (context);
8390 }
8391
8392 return context;
8393 }
8394
8395 /* Return the innermost context enclosing DECL that is
8396 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8397 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8398
8399 tree
8400 decl_type_context (const_tree decl)
8401 {
8402 tree context = DECL_CONTEXT (decl);
8403
8404 while (context)
8405 switch (TREE_CODE (context))
8406 {
8407 case NAMESPACE_DECL:
8408 case TRANSLATION_UNIT_DECL:
8409 return NULL_TREE;
8410
8411 case RECORD_TYPE:
8412 case UNION_TYPE:
8413 case QUAL_UNION_TYPE:
8414 return context;
8415
8416 case TYPE_DECL:
8417 case FUNCTION_DECL:
8418 context = DECL_CONTEXT (context);
8419 break;
8420
8421 case BLOCK:
8422 context = BLOCK_SUPERCONTEXT (context);
8423 break;
8424
8425 default:
8426 gcc_unreachable ();
8427 }
8428
8429 return NULL_TREE;
8430 }
8431
8432 /* CALL is a CALL_EXPR. Return the declaration for the function
8433 called, or NULL_TREE if the called function cannot be
8434 determined. */
8435
8436 tree
8437 get_callee_fndecl (const_tree call)
8438 {
8439 tree addr;
8440
8441 if (call == error_mark_node)
8442 return error_mark_node;
8443
8444 /* It's invalid to call this function with anything but a
8445 CALL_EXPR. */
8446 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8447
8448 /* The first operand to the CALL is the address of the function
8449 called. */
8450 addr = CALL_EXPR_FN (call);
8451
8452 /* If there is no function, return early. */
8453 if (addr == NULL_TREE)
8454 return NULL_TREE;
8455
8456 STRIP_NOPS (addr);
8457
8458 /* If this is a readonly function pointer, extract its initial value. */
8459 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8460 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8461 && DECL_INITIAL (addr))
8462 addr = DECL_INITIAL (addr);
8463
8464 /* If the address is just `&f' for some function `f', then we know
8465 that `f' is being called. */
8466 if (TREE_CODE (addr) == ADDR_EXPR
8467 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8468 return TREE_OPERAND (addr, 0);
8469
8470 /* We couldn't figure out what was being called. */
8471 return NULL_TREE;
8472 }
8473
/* Return true when STMTs arguments and return value match those of FNDECL,
   a decl of a builtin function.  */

static bool
tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  /* Prefer the canonical builtin decl's prototype when available.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
      fndecl = decl;

  /* In GIMPLE form use the stricter useless_type_conversion_p test;
     pre-gimplification compare main variants instead.  */
  bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
  if (gimple_form
      ? !useless_type_conversion_p (TREE_TYPE (call),
				    TREE_TYPE (TREE_TYPE (fndecl)))
      : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
	 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
    return false;

  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = call_expr_nargs (call);
  for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = CALL_EXPR_ARG (call, i);
      tree type = TREE_VALUE (targs);
      if (gimple_form
	  ? !useless_type_conversion_p (type, TREE_TYPE (arg))
	  : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
	{
	  /* For pointer arguments be more forgiving, e.g. due to
	     FILE * vs. fileptr_type_node, or say char * vs. const char *
	     differences etc.  */
	  if (!gimple_form
	      && POINTER_TYPE_P (type)
	      && POINTER_TYPE_P (TREE_TYPE (arg))
	      && tree_nop_conversion_p (type, TREE_TYPE (arg)))
	    continue;
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg))
	      && !TYPE_UNSIGNED (TREE_TYPE (arg))
	      && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	      && (gimple_form
		  ? useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg))
		  : tree_nop_conversion_p (integer_type_node,
					   TREE_TYPE (arg))))
	    continue;
	  return false;
	}
    }
  /* Too few arguments: the remaining named parameters must be void-
     terminated for the call to be compatible.  */
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
8536
8537 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8538 return the associated function code, otherwise return CFN_LAST. */
8539
8540 combined_fn
8541 get_call_combined_fn (const_tree call)
8542 {
8543 /* It's invalid to call this function with anything but a CALL_EXPR. */
8544 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8545
8546 if (!CALL_EXPR_FN (call))
8547 return as_combined_fn (CALL_EXPR_IFN (call));
8548
8549 tree fndecl = get_callee_fndecl (call);
8550 if (fndecl
8551 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8552 && tree_builtin_call_types_compatible_p (call, fndecl))
8553 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8554
8555 return CFN_LAST;
8556 }
8557
8558 /* Comparator of indices based on tree_node_counts. */
8559
8560 static int
8561 tree_nodes_cmp (const void *p1, const void *p2)
8562 {
8563 const unsigned *n1 = (const unsigned *)p1;
8564 const unsigned *n2 = (const unsigned *)p2;
8565
8566 return tree_node_counts[*n1] - tree_node_counts[*n2];
8567 }
8568
8569 /* Comparator of indices based on tree_code_counts. */
8570
8571 static int
8572 tree_codes_cmp (const void *p1, const void *p2)
8573 {
8574 const unsigned *n1 = (const unsigned *)p1;
8575 const unsigned *n2 = (const unsigned *)p2;
8576
8577 return tree_code_counts[*n1] - tree_code_counts[*n2];
8578 }
8579
8580 #define TREE_MEM_USAGE_SPACES 40
8581
8582 /* Print debugging information about tree nodes generated during the compile,
8583 and any language-specific information. */
8584
8585 void
8586 dump_tree_statistics (void)
8587 {
8588 if (GATHER_STATISTICS)
8589 {
8590 uint64_t total_nodes, total_bytes;
8591 fprintf (stderr, "\nKind Nodes Bytes\n");
8592 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8593 total_nodes = total_bytes = 0;
8594
8595 {
8596 auto_vec<unsigned> indices (all_kinds);
8597 for (unsigned i = 0; i < all_kinds; i++)
8598 indices.quick_push (i);
8599 indices.qsort (tree_nodes_cmp);
8600
8601 for (unsigned i = 0; i < (int) all_kinds; i++)
8602 {
8603 unsigned j = indices[i];
8604 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8605 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8606 SIZE_AMOUNT (tree_node_sizes[j]));
8607 total_nodes += tree_node_counts[j];
8608 total_bytes += tree_node_sizes[j];
8609 }
8610 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8611 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8612 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8613 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8614 }
8615
8616 {
8617 fprintf (stderr, "Code Nodes\n");
8618 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8619
8620 auto_vec<unsigned> indices (MAX_TREE_CODES);
8621 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8622 indices.quick_push (i);
8623 indices.qsort (tree_codes_cmp);
8624
8625 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8626 {
8627 unsigned j = indices[i];
8628 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8629 get_tree_code_name ((enum tree_code) j),
8630 SIZE_AMOUNT (tree_code_counts[j]));
8631 }
8632 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8633 fprintf (stderr, "\n");
8634 ssanames_print_statistics ();
8635 fprintf (stderr, "\n");
8636 phinodes_print_statistics ();
8637 fprintf (stderr, "\n");
8638 }
8639 }
8640 else
8641 fprintf (stderr, "(No per-node statistics)\n");
8642
8643 print_type_hash_statistics ();
8644 print_debug_expr_statistics ();
8645 print_value_expr_statistics ();
8646 lang_hooks.print_statistics ();
8647 }
8648 \f
8649 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8650
/* Generate a crc32 of the low BYTES bytes of VALUE.
   Assumes 1 <= BYTES <= 4 (a larger value would shift by a negative
   amount below).  Processes the input four bits at a time using a
   16-entry syndrome table.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes within the 32-bit word.  */
  value <<= 32 - bytes * 8;

  /* Consume one nibble per iteration, high nibble first.  */
  for (unsigned nibbles = bytes * 2; nibbles > 0; nibbles--)
    {
      unsigned top = ((value ^ chksum) >> 28) & 0xf;

      chksum = (chksum << 4) ^ syndromes[top];
      value <<= 4;
    }

  return chksum;
}
8680
8681 /* Generate a crc32 of a string. */
8682
8683 unsigned
8684 crc32_string (unsigned chksum, const char *string)
8685 {
8686 do
8687 chksum = crc32_byte (chksum, *string);
8688 while (*string++);
8689 return chksum;
8690 }
8691
8692 /* P is a string that will be used in a symbol. Mask out any characters
8693 that are not valid in that context. */
8694
8695 void
8696 clean_symbol_name (char *p)
8697 {
8698 for (; *p; p++)
8699 if (! (ISALNUM (*p)
8700 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8701 || *p == '$'
8702 #endif
8703 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8704 || *p == '.'
8705 #endif
8706 ))
8707 *p = '_';
8708 }
8709
8710 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8711
8712 /* Create a unique anonymous identifier. The identifier is still a
8713 valid assembly label. */
8714
8715 tree
8716 make_anon_name ()
8717 {
8718 const char *fmt =
8719 #if !defined (NO_DOT_IN_LABEL)
8720 "."
8721 #elif !defined (NO_DOLLAR_IN_LABEL)
8722 "$"
8723 #else
8724 "_"
8725 #endif
8726 "_anon_%d";
8727
8728 char buf[24];
8729 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8730 gcc_checking_assert (len < int (sizeof (buf)));
8731
8732 tree id = get_identifier_with_length (buf, len);
8733 IDENTIFIER_ANON_P (id) = true;
8734
8735 return id;
8736 }
8737
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (startswith (type, "sub_")
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 covers "_%08X_" (underscore + 8 hex digits + underscore),
	 19 covers the HOST_WIDE_INT hex seed, +1 for the NUL.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Q aliases the writable copy of P in every branch above; scrub
     characters that are not valid in a symbol.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
8812 \f
8813 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8814
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  Does not return (ends in internal_error).  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: size the message buffer.  The 4 extra
     bytes per code cover a " or " separator (or the trailing NUL).  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build "expected A or B ..." in a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* Empty code list: fall back to a vaguer message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8858
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  Does not return (ends in internal_error).  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: size the message buffer.  The 4 extra
     bytes per code cover a " or " separator (or the trailing NUL).  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the "A or B or C" list in a stack buffer.
     NOTE(review): unlike tree_check_failed, an empty code list leaves
     BUFFER uninitialized here; callers appear to always pass at least
     one code -- confirm before relying on this with no codes.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8895
8896 /* Similar to tree_check_failed, except that we check for a class of tree
8897 code, given in CL. */
8898
8899 void
8900 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8901 const char *file, int line, const char *function)
8902 {
8903 internal_error
8904 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8905 TREE_CODE_CLASS_STRING (cl),
8906 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8907 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8908 }
8909
8910 /* Similar to tree_check_failed, except that instead of specifying a
8911 dozen codes, use the knowledge that they're all sequential. */
8912
8913 void
8914 tree_range_check_failed (const_tree node, const char *file, int line,
8915 const char *function, enum tree_code c1,
8916 enum tree_code c2)
8917 {
8918 char *buffer;
8919 unsigned length = 0;
8920 unsigned int c;
8921
8922 for (c = c1; c <= c2; ++c)
8923 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8924
8925 length += strlen ("expected ");
8926 buffer = (char *) alloca (length);
8927 length = 0;
8928
8929 for (c = c1; c <= c2; ++c)
8930 {
8931 const char *prefix = length ? " or " : "expected ";
8932
8933 strcpy (buffer + length, prefix);
8934 length += strlen (prefix);
8935 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8936 length += strlen (get_tree_code_name ((enum tree_code) c));
8937 }
8938
8939 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8940 buffer, get_tree_code_name (TREE_CODE (node)),
8941 function, trim_filename (file), line);
8942 }
8943
8944
8945 /* Similar to tree_check_failed, except that we check that a tree does
8946 not have the specified code, given in CL. */
8947
8948 void
8949 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8950 const char *file, int line, const char *function)
8951 {
8952 internal_error
8953 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8954 TREE_CODE_CLASS_STRING (cl),
8955 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8956 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8957 }
8958
8959
8960 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8961
8962 void
8963 omp_clause_check_failed (const_tree node, const char *file, int line,
8964 const char *function, enum omp_clause_code code)
8965 {
8966 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8967 "in %s, at %s:%d",
8968 omp_clause_code_name[code],
8969 get_tree_code_name (TREE_CODE (node)),
8970 function, trim_filename (file), line);
8971 }
8972
8973
8974 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8975
8976 void
8977 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8978 const char *function, enum omp_clause_code c1,
8979 enum omp_clause_code c2)
8980 {
8981 char *buffer;
8982 unsigned length = 0;
8983 unsigned int c;
8984
8985 for (c = c1; c <= c2; ++c)
8986 length += 4 + strlen (omp_clause_code_name[c]);
8987
8988 length += strlen ("expected ");
8989 buffer = (char *) alloca (length);
8990 length = 0;
8991
8992 for (c = c1; c <= c2; ++c)
8993 {
8994 const char *prefix = length ? " or " : "expected ";
8995
8996 strcpy (buffer + length, prefix);
8997 length += strlen (prefix);
8998 strcpy (buffer + length, omp_clause_code_name[c]);
8999 length += strlen (omp_clause_code_name[c]);
9000 }
9001
9002 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9003 buffer, omp_clause_code_name[TREE_CODE (node)],
9004 function, trim_filename (file), line);
9005 }
9006
9007
/* Expand each DEFTREESTRUCT entry in treestruct.def to its printable
   NAME, producing a table indexed by tree_node_structure_enum.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value EN to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9017
9018 /* Similar to tree_class_check_failed, except that we check for
9019 whether CODE contains the tree structure identified by EN. */
9020
9021 void
9022 tree_contains_struct_check_failed (const_tree node,
9023 const enum tree_node_structure_enum en,
9024 const char *file, int line,
9025 const char *function)
9026 {
9027 internal_error
9028 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9029 TS_ENUM_NAME (en),
9030 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9031 }
9032
9033
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) element vector: element IDX was
   accessed in a constant with only LEN elements.  (The previous comment
   said TREE_VEC -- a copy-paste slip; the message below names
   tree_int_cst.)  Does not return.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9046
9047 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9048 (dynamically sized) vector. */
9049
9050 void
9051 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9052 const char *function)
9053 {
9054 internal_error
9055 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9056 idx + 1, len, function, trim_filename (file), line);
9057 }
9058
9059 /* Similar to above, except that the check is for the bounds of the operand
9060 vector of an expression node EXP. */
9061
9062 void
9063 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9064 int line, const char *function)
9065 {
9066 enum tree_code code = TREE_CODE (exp);
9067 internal_error
9068 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9069 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9070 function, trim_filename (file), line);
9071 }
9072
9073 /* Similar to above, except that the check is for the number of
9074 operands of an OMP_CLAUSE node. */
9075
9076 void
9077 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9078 int line, const char *function)
9079 {
9080 internal_error
9081 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9082 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9083 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9084 trim_filename (file), line);
9085 }
9086 #endif /* ENABLE_TREE_CHECKING */
9087 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Vector types are always built over the main variant of the element
     type; attributes/qualifiers are reapplied at the end.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Build the canonical type from the canonical element type.
       NOTE(review): the recursive call passes VOIDmode, which (together
       with the canonical element type) presumably makes the recursion
       bottom out -- confirm against type_hash_canon behavior.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share a previously built identical vector type, if any.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
9126
9127 static tree
9128 make_or_reuse_type (unsigned size, int unsignedp)
9129 {
9130 int i;
9131
9132 if (size == INT_TYPE_SIZE)
9133 return unsignedp ? unsigned_type_node : integer_type_node;
9134 if (size == CHAR_TYPE_SIZE)
9135 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9136 if (size == SHORT_TYPE_SIZE)
9137 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9138 if (size == LONG_TYPE_SIZE)
9139 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9140 if (size == LONG_LONG_TYPE_SIZE)
9141 return (unsignedp ? long_long_unsigned_type_node
9142 : long_long_integer_type_node);
9143
9144 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9145 if (size == int_n_data[i].bitsize
9146 && int_n_enabled_p[i])
9147 return (unsignedp ? int_n_trees[i].unsigned_type
9148 : int_n_trees[i].signed_type);
9149
9150 if (unsignedp)
9151 return make_unsigned_type (size);
9152 else
9153 return make_signed_type (size);
9154 }
9155
9156 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9157
9158 static tree
9159 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9160 {
9161 if (satp)
9162 {
9163 if (size == SHORT_FRACT_TYPE_SIZE)
9164 return unsignedp ? sat_unsigned_short_fract_type_node
9165 : sat_short_fract_type_node;
9166 if (size == FRACT_TYPE_SIZE)
9167 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9168 if (size == LONG_FRACT_TYPE_SIZE)
9169 return unsignedp ? sat_unsigned_long_fract_type_node
9170 : sat_long_fract_type_node;
9171 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9172 return unsignedp ? sat_unsigned_long_long_fract_type_node
9173 : sat_long_long_fract_type_node;
9174 }
9175 else
9176 {
9177 if (size == SHORT_FRACT_TYPE_SIZE)
9178 return unsignedp ? unsigned_short_fract_type_node
9179 : short_fract_type_node;
9180 if (size == FRACT_TYPE_SIZE)
9181 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9182 if (size == LONG_FRACT_TYPE_SIZE)
9183 return unsignedp ? unsigned_long_fract_type_node
9184 : long_fract_type_node;
9185 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9186 return unsignedp ? unsigned_long_long_fract_type_node
9187 : long_long_fract_type_node;
9188 }
9189
9190 return make_fract_type (size, unsignedp, satp);
9191 }
9192
9193 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9194
9195 static tree
9196 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9197 {
9198 if (satp)
9199 {
9200 if (size == SHORT_ACCUM_TYPE_SIZE)
9201 return unsignedp ? sat_unsigned_short_accum_type_node
9202 : sat_short_accum_type_node;
9203 if (size == ACCUM_TYPE_SIZE)
9204 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9205 if (size == LONG_ACCUM_TYPE_SIZE)
9206 return unsignedp ? sat_unsigned_long_accum_type_node
9207 : sat_long_accum_type_node;
9208 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9209 return unsignedp ? sat_unsigned_long_long_accum_type_node
9210 : sat_long_long_accum_type_node;
9211 }
9212 else
9213 {
9214 if (size == SHORT_ACCUM_TYPE_SIZE)
9215 return unsignedp ? unsigned_short_accum_type_node
9216 : short_accum_type_node;
9217 if (size == ACCUM_TYPE_SIZE)
9218 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9219 if (size == LONG_ACCUM_TYPE_SIZE)
9220 return unsignedp ? unsigned_long_accum_type_node
9221 : long_accum_type_node;
9222 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9223 return unsignedp ? unsigned_long_long_accum_type_node
9224 : long_long_accum_type_node;
9225 }
9226
9227 return make_accum_type (size, unsignedp, satp);
9228 }
9229
9230
9231 /* Create an atomic variant node for TYPE. This routine is called
9232 during initialization of data types to create the 5 basic atomic
9233 types. The generic build_variant_type function requires these to
9234 already be set up in order to function properly, so cannot be
9235 called from there. If ALIGN is non-zero, then ensure alignment is
9236 overridden to this value. */
9237
9238 static tree
9239 build_atomic_base (tree type, unsigned int align)
9240 {
9241 tree t;
9242
9243 /* Make sure its not already registered. */
9244 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9245 return t;
9246
9247 t = build_variant_type_copy (type);
9248 set_type_quals (t, TYPE_QUAL_ATOMIC);
9249
9250 if (align)
9251 SET_TYPE_ALIGN (t, align);
9252
9253 return t;
9254 }
9255
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry is
   { N, extended }: the bit count and whether it is an _FloatNx
   "extended" type.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
{
  { 16, false },   /* _Float16 */
  { 32, false },   /* _Float32 */
  { 64, false },   /* _Float64 */
  { 128, false },  /* _Float128 */
  { 32, true },    /* _Float32x */
  { 64, true },    /* _Float64x */
  { 128, true },   /* _Float128x */
};
9268
9269
9270 /* Create nodes for all integer types (and error_mark_node) using the sizes
9271 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9272
9273 void
9274 build_common_tree_nodes (bool signed_char)
9275 {
9276 int i;
9277
9278 error_mark_node = make_node (ERROR_MARK);
9279 TREE_TYPE (error_mark_node) = error_mark_node;
9280
9281 initialize_sizetypes ();
9282
9283 /* Define both `signed char' and `unsigned char'. */
9284 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9285 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9286 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9287 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9288
9289 /* Define `char', which is like either `signed char' or `unsigned char'
9290 but not the same as either. */
9291 char_type_node
9292 = (signed_char
9293 ? make_signed_type (CHAR_TYPE_SIZE)
9294 : make_unsigned_type (CHAR_TYPE_SIZE));
9295 TYPE_STRING_FLAG (char_type_node) = 1;
9296
9297 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9298 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9299 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9300 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9301 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9302 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9303 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9304 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9305
9306 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9307 {
9308 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9309 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9310
9311 if (int_n_enabled_p[i])
9312 {
9313 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9314 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9315 }
9316 }
9317
9318 /* Define a boolean type. This type only represents boolean values but
9319 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9320 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9321 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9322 TYPE_PRECISION (boolean_type_node) = 1;
9323 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9324
9325 /* Define what type to use for size_t. */
9326 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9327 size_type_node = unsigned_type_node;
9328 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9329 size_type_node = long_unsigned_type_node;
9330 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9331 size_type_node = long_long_unsigned_type_node;
9332 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9333 size_type_node = short_unsigned_type_node;
9334 else
9335 {
9336 int i;
9337
9338 size_type_node = NULL_TREE;
9339 for (i = 0; i < NUM_INT_N_ENTS; i++)
9340 if (int_n_enabled_p[i])
9341 {
9342 char name[50], altname[50];
9343 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9344 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9345
9346 if (strcmp (name, SIZE_TYPE) == 0
9347 || strcmp (altname, SIZE_TYPE) == 0)
9348 {
9349 size_type_node = int_n_trees[i].unsigned_type;
9350 }
9351 }
9352 if (size_type_node == NULL_TREE)
9353 gcc_unreachable ();
9354 }
9355
9356 /* Define what type to use for ptrdiff_t. */
9357 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9358 ptrdiff_type_node = integer_type_node;
9359 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9360 ptrdiff_type_node = long_integer_type_node;
9361 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9362 ptrdiff_type_node = long_long_integer_type_node;
9363 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9364 ptrdiff_type_node = short_integer_type_node;
9365 else
9366 {
9367 ptrdiff_type_node = NULL_TREE;
9368 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9369 if (int_n_enabled_p[i])
9370 {
9371 char name[50], altname[50];
9372 sprintf (name, "__int%d", int_n_data[i].bitsize);
9373 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9374
9375 if (strcmp (name, PTRDIFF_TYPE) == 0
9376 || strcmp (altname, PTRDIFF_TYPE) == 0)
9377 ptrdiff_type_node = int_n_trees[i].signed_type;
9378 }
9379 if (ptrdiff_type_node == NULL_TREE)
9380 gcc_unreachable ();
9381 }
9382
9383 /* Fill in the rest of the sized types. Reuse existing type nodes
9384 when possible. */
9385 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9386 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9387 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9388 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9389 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9390
9391 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9392 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9393 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9394 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9395 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9396
9397 /* Don't call build_qualified type for atomics. That routine does
9398 special processing for atomics, and until they are initialized
9399 it's better not to make that call.
9400
9401 Check to see if there is a target override for atomic types. */
9402
9403 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9404 targetm.atomic_align_for_mode (QImode));
9405 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9406 targetm.atomic_align_for_mode (HImode));
9407 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9408 targetm.atomic_align_for_mode (SImode));
9409 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9410 targetm.atomic_align_for_mode (DImode));
9411 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9412 targetm.atomic_align_for_mode (TImode));
9413
9414 access_public_node = get_identifier ("public");
9415 access_protected_node = get_identifier ("protected");
9416 access_private_node = get_identifier ("private");
9417
9418 /* Define these next since types below may used them. */
9419 integer_zero_node = build_int_cst (integer_type_node, 0);
9420 integer_one_node = build_int_cst (integer_type_node, 1);
9421 integer_three_node = build_int_cst (integer_type_node, 3);
9422 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9423
9424 size_zero_node = size_int (0);
9425 size_one_node = size_int (1);
9426 bitsize_zero_node = bitsize_int (0);
9427 bitsize_one_node = bitsize_int (1);
9428 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9429
9430 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9431 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9432
9433 void_type_node = make_node (VOID_TYPE);
9434 layout_type (void_type_node);
9435
9436 /* We are not going to have real types in C with less than byte alignment,
9437 so we might as well not have any types that claim to have it. */
9438 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9439 TYPE_USER_ALIGN (void_type_node) = 0;
9440
9441 void_node = make_node (VOID_CST);
9442 TREE_TYPE (void_node) = void_type_node;
9443
9444 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9445
9446 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9447 layout_type (TREE_TYPE (null_pointer_node));
9448
9449 ptr_type_node = build_pointer_type (void_type_node);
9450 const_ptr_type_node
9451 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9452 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9453 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9454
9455 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9456
9457 float_type_node = make_node (REAL_TYPE);
9458 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9459 layout_type (float_type_node);
9460
9461 double_type_node = make_node (REAL_TYPE);
9462 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9463 layout_type (double_type_node);
9464
9465 long_double_type_node = make_node (REAL_TYPE);
9466 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9467 layout_type (long_double_type_node);
9468
9469 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9470 {
9471 int n = floatn_nx_types[i].n;
9472 bool extended = floatn_nx_types[i].extended;
9473 scalar_float_mode mode;
9474 if (!targetm.floatn_mode (n, extended).exists (&mode))
9475 continue;
9476 int precision = GET_MODE_PRECISION (mode);
9477 /* Work around the rs6000 KFmode having precision 113 not
9478 128. */
9479 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9480 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9481 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9482 if (!extended)
9483 gcc_assert (min_precision == n);
9484 if (precision < min_precision)
9485 precision = min_precision;
9486 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9487 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9488 layout_type (FLOATN_NX_TYPE_NODE (i));
9489 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9490 }
9491 float128t_type_node = float128_type_node;
9492 #ifdef HAVE_BFmode
9493 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9494 && targetm.scalar_mode_supported_p (BFmode)
9495 && targetm.libgcc_floating_mode_supported_p (BFmode))
9496 {
9497 bfloat16_type_node = make_node (REAL_TYPE);
9498 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9499 layout_type (bfloat16_type_node);
9500 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9501 }
9502 #endif
9503
9504 float_ptr_type_node = build_pointer_type (float_type_node);
9505 double_ptr_type_node = build_pointer_type (double_type_node);
9506 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9507 integer_ptr_type_node = build_pointer_type (integer_type_node);
9508
9509 /* Fixed size integer types. */
9510 uint16_type_node = make_or_reuse_type (16, 1);
9511 uint32_type_node = make_or_reuse_type (32, 1);
9512 uint64_type_node = make_or_reuse_type (64, 1);
9513 if (targetm.scalar_mode_supported_p (TImode))
9514 uint128_type_node = make_or_reuse_type (128, 1);
9515
9516 /* Decimal float types. */
9517 if (targetm.decimal_float_supported_p ())
9518 {
9519 dfloat32_type_node = make_node (REAL_TYPE);
9520 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9521 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9522 layout_type (dfloat32_type_node);
9523
9524 dfloat64_type_node = make_node (REAL_TYPE);
9525 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9526 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9527 layout_type (dfloat64_type_node);
9528
9529 dfloat128_type_node = make_node (REAL_TYPE);
9530 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9531 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9532 layout_type (dfloat128_type_node);
9533 }
9534
9535 complex_integer_type_node = build_complex_type (integer_type_node, true);
9536 complex_float_type_node = build_complex_type (float_type_node, true);
9537 complex_double_type_node = build_complex_type (double_type_node, true);
9538 complex_long_double_type_node = build_complex_type (long_double_type_node,
9539 true);
9540
9541 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9542 {
9543 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9544 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9545 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9546 }
9547
9548 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9549 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9550 sat_ ## KIND ## _type_node = \
9551 make_sat_signed_ ## KIND ## _type (SIZE); \
9552 sat_unsigned_ ## KIND ## _type_node = \
9553 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9554 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9555 unsigned_ ## KIND ## _type_node = \
9556 make_unsigned_ ## KIND ## _type (SIZE);
9557
9558 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9559 sat_ ## WIDTH ## KIND ## _type_node = \
9560 make_sat_signed_ ## KIND ## _type (SIZE); \
9561 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9562 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9563 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9564 unsigned_ ## WIDTH ## KIND ## _type_node = \
9565 make_unsigned_ ## KIND ## _type (SIZE);
9566
9567 /* Make fixed-point type nodes based on four different widths. */
9568 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9569 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9570 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9571 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9572 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9573
9574 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9575 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9576 NAME ## _type_node = \
9577 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9578 u ## NAME ## _type_node = \
9579 make_or_reuse_unsigned_ ## KIND ## _type \
9580 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9581 sat_ ## NAME ## _type_node = \
9582 make_or_reuse_sat_signed_ ## KIND ## _type \
9583 (GET_MODE_BITSIZE (MODE ## mode)); \
9584 sat_u ## NAME ## _type_node = \
9585 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9586 (GET_MODE_BITSIZE (U ## MODE ## mode));
9587
9588 /* Fixed-point type and mode nodes. */
9589 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9590 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9591 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9592 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9593 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9594 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9595 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9596 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9597 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9598 MAKE_FIXED_MODE_NODE (accum, da, DA)
9599 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9600
9601 {
9602 tree t = targetm.build_builtin_va_list ();
9603
9604 /* Many back-ends define record types without setting TYPE_NAME.
9605 If we copied the record type here, we'd keep the original
9606 record type without a name. This breaks name mangling. So,
9607 don't copy record types and let c_common_nodes_and_builtins()
9608 declare the type to be __builtin_va_list. */
9609 if (TREE_CODE (t) != RECORD_TYPE)
9610 t = build_variant_type_copy (t);
9611
9612 va_list_type_node = t;
9613 }
9614
9615 /* SCEV analyzer global shared trees. */
9616 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9617 TREE_TYPE (chrec_dont_know) = void_type_node;
9618 chrec_known = make_node (SCEV_KNOWN);
9619 TREE_TYPE (chrec_known) = void_type_node;
9620 }
9621
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  /* Translate the ECF_* call flags into the corresponding bits and
     attributes on the function declaration itself.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* Noreturn functions are historically marked volatile on the decl.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* "leaf" and "cold" have no dedicated decl bit; they are represented
     as attributes prepended to DECL_ATTRIBUTES.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* ECF_RET1 is encoded as the internal "fn spec" attribute; the spec
     string "1 " says the function returns its first argument.  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
		   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
9663
9664
9665 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9666
9667 static void
9668 local_define_builtin (const char *name, tree type, enum built_in_function code,
9669 const char *library_name, int ecf_flags)
9670 {
9671 tree decl;
9672
9673 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9674 library_name, NULL_TREE);
9675 set_call_expr_flags (decl, ecf_flags);
9676
9677 set_builtin_decl (code, decl, true);
9678 }
9679
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Each section below only defines a builtin if the front end has not
     already declared it explicitly, so language-specific declarations
     take precedence.  */
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
    {
      ftype = build_function_type_list (void_type_node,
					ptr_type_node,
					ptr_type_node,
					integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_clear_padding", ftype,
			    BUILT_IN_CLEAR_PADDING,
			    "__builtin_clear_padding",
			    ECF_LEAF | ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_TRAP)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      /* All three share the type void (void).  */
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
	local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
			      "__builtin_trap",
			      ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      /* void *(void *, const void *, size_t), shared by memcpy/memmove.  */
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline and descriptor initializers share the same
     void (void *, void *, void *) type.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
			  BUILT_IN_CLEAR_CACHE,
			  "__clear_cache",
			  ECF_NOTHROW);

  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* The *_eq comparison builtins share one int (const void *, const void *,
     size_t) type.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	/* __mulMODE3/__divMODE3 take the real and imaginary parts of
	   both operands separately: (re1, im1, re2, im2).  */
	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to form e.g. "sc" in "__mulsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
9979
9980 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9981 better way.
9982
9983 If we requested a pointer to a vector, build up the pointers that
9984 we stripped off while looking for the inner type. Similarly for
9985 return values from functions.
9986
9987 The argument TYPE is the top of the chain, and BOTTOM is the
9988 new type which we will point to. */
9989
9990 tree
9991 reconstruct_complex_type (tree type, tree bottom)
9992 {
9993 tree inner, outer;
9994
9995 if (TREE_CODE (type) == POINTER_TYPE)
9996 {
9997 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9998 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9999 TYPE_REF_CAN_ALIAS_ALL (type));
10000 }
10001 else if (TREE_CODE (type) == REFERENCE_TYPE)
10002 {
10003 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10004 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10005 TYPE_REF_CAN_ALIAS_ALL (type));
10006 }
10007 else if (TREE_CODE (type) == ARRAY_TYPE)
10008 {
10009 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10010 outer = build_array_type (inner, TYPE_DOMAIN (type));
10011 }
10012 else if (TREE_CODE (type) == FUNCTION_TYPE)
10013 {
10014 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10015 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10016 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10017 }
10018 else if (TREE_CODE (type) == METHOD_TYPE)
10019 {
10020 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10021 /* The build_method_type_directly() routine prepends 'this' to argument list,
10022 so we must compensate by getting rid of it. */
10023 outer
10024 = build_method_type_directly
10025 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10026 inner,
10027 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10028 }
10029 else if (TREE_CODE (type) == OFFSET_TYPE)
10030 {
10031 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10032 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10033 }
10034 else
10035 return bottom;
10036
10037 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10038 TYPE_QUALS (type));
10039 }
10040
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  /* Derive the element count from MODE; for vector modes it is given
     directly, for integer modes it is inferred from the total size.  */
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
10074
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  The mode is left for make_vector_type to compute
   (VOIDmode requests the default mode for the element type/count).  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  return make_vector_type (innertype, nunits, VOIDmode);
}
10083
10084 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10085
10086 tree
10087 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10088 {
10089 gcc_assert (mask_mode != BLKmode);
10090
10091 unsigned HOST_WIDE_INT esize;
10092 if (VECTOR_MODE_P (mask_mode))
10093 {
10094 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10095 esize = vector_element_size (vsize, nunits);
10096 }
10097 else
10098 esize = 1;
10099
10100 tree bool_type = build_nonstandard_boolean_type (esize);
10101
10102 return make_vector_type (bool_type, nunits, mask_mode);
10103 }
10104
10105 /* Build a vector type that holds one boolean result for each element of
10106 vector type VECTYPE. The public interface for this operation is
10107 truth_type_for. */
10108
10109 static tree
10110 build_truth_vector_type_for (tree vectype)
10111 {
10112 machine_mode vector_mode = TYPE_MODE (vectype);
10113 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10114
10115 machine_mode mask_mode;
10116 if (VECTOR_MODE_P (vector_mode)
10117 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10118 return build_truth_vector_type_for_mode (nunits, mask_mode);
10119
10120 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10121 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10122 tree bool_type = build_nonstandard_boolean_type (esize);
10123
10124 return make_vector_type (bool_type, nunits, VOIDmode);
10125 }
10126
10127 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10128 set. */
10129
10130 tree
10131 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10132 {
10133 tree t = make_vector_type (innertype, nunits, VOIDmode);
10134 tree cand;
10135 /* We always build the non-opaque variant before the opaque one,
10136 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10137 cand = TYPE_NEXT_VARIANT (t);
10138 if (cand
10139 && TYPE_VECTOR_OPAQUE (cand)
10140 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10141 return cand;
10142 /* Othewise build a variant type and make sure to queue it after
10143 the non-opaque type. */
10144 cand = build_distinct_type_copy (t);
10145 TYPE_VECTOR_OPAQUE (cand) = true;
10146 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10147 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10148 TYPE_NEXT_VARIANT (t) = cand;
10149 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10150 return cand;
10151 }
10152
10153 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10154
10155 static poly_wide_int
10156 vector_cst_int_elt (const_tree t, unsigned int i)
10157 {
10158 /* First handle elements that are directly encoded. */
10159 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10160 if (i < encoded_nelts)
10161 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10162
10163 /* Identify the pattern that contains element I and work out the index of
10164 the last encoded element for that pattern. */
10165 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10166 unsigned int pattern = i % npatterns;
10167 unsigned int count = i / npatterns;
10168 unsigned int final_i = encoded_nelts - npatterns + pattern;
10169
10170 /* If there are no steps, the final encoded value is the right one. */
10171 if (!VECTOR_CST_STEPPED_P (t))
10172 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10173
10174 /* Otherwise work out the value from the last two encoded elements. */
10175 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10176 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10177 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10178 return wi::to_poly_wide (v2) + (count - 2) * diff;
10179 }
10180
10181 /* Return the value of element I of VECTOR_CST T. */
10182
10183 tree
10184 vector_cst_elt (const_tree t, unsigned int i)
10185 {
10186 /* First handle elements that are directly encoded. */
10187 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10188 if (i < encoded_nelts)
10189 return VECTOR_CST_ENCODED_ELT (t, i);
10190
10191 /* If there are no steps, the final encoded value is the right one. */
10192 if (!VECTOR_CST_STEPPED_P (t))
10193 {
10194 /* Identify the pattern that contains element I and work out the index of
10195 the last encoded element for that pattern. */
10196 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10197 unsigned int pattern = i % npatterns;
10198 unsigned int final_i = encoded_nelts - npatterns + pattern;
10199 return VECTOR_CST_ENCODED_ELT (t, final_i);
10200 }
10201
10202 /* Otherwise work out the value from the last two encoded elements. */
10203 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10204 vector_cst_int_elt (t, i));
10205 }
10206
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  /* Let callers pass null by redirecting NONZERO to a local.  */
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a string constant, set by the MEM_REF case below.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Both parts must be zero, and neither real part may be -0.0
	 (whose bit pattern is not all zeros).  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* Only a duplicated single pattern can be checked via its one
	 encoded element; anything else is treated as possibly nonzero.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber is neither known-zero nor known-nonzero.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* All recorded elements must be zero; unrecorded elements are
	   implicitly zero.  NONZERO propagates through the recursion.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Handle loads from string constants, e.g. *(&"..."[off]).  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	/* An out-of-bounds offset gives no information.  */
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
10334
10335 /* Return true if EXPR is an initializer expression in which every element
10336 is a constant that is numerically equal to 0 or 1. The elements do not
10337 need to be equal to each other. */
10338
bool
initializer_each_zero_or_onep (const_tree expr)
{
  /* Look through any location wrapper before inspecting the node.  */
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return integer_zerop (expr) || integer_onep (expr);

    case REAL_CST:
      return real_zerop (expr) || real_onep (expr);

    case VECTOR_CST:
      {
	/* Normally checking the explicitly encoded elements suffices,
	   since the remaining elements just repeat them.  A stepped
	   encoding implies elements beyond the encoded ones, so in that
	   case check every element, which requires a compile-time
	   constant element count.  */
	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
	if (VECTOR_CST_STEPPED_P (expr)
	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
	  return false;

	for (unsigned int i = 0; i < nelts; ++i)
	  {
	    tree elt = vector_cst_elt (expr, i);
	    if (!initializer_each_zero_or_onep (elt))
	      return false;
	  }

	return true;
      }

    default:
      return false;
    }
}
10373
10374 /* Check if vector VEC consists of all the equal elements and
10375 that the number of elements corresponds to the type of VEC.
10376 The function returns first element of the vector
10377 or NULL_TREE if the vector is not uniform. */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* A VECTOR_CST is uniform iff its encoding is a single duplicated
	 pattern; the encoded element is then the repeated value.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* After the loop I is the number of elements seen; reject a
	 CONSTRUCTOR with fewer elements than the vector type has,
	 since the missing trailing elements need not equal FIRST.  */
      if (i != nelts)
	return NULL_TREE;

      /* The elements may themselves be vectors; recurse to find the
	 underlying uniform value, if any.  */
      if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
	return uniform_vector_p (first);
      return first;
    }

  return NULL_TREE;
}
10424
10425 /* If the argument is INTEGER_CST, return it. If the argument is vector
10426 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10427 return NULL_TREE.
10428 Look through location wrappers. */
10429
10430 tree
10431 uniform_integer_cst_p (tree t)
10432 {
10433 STRIP_ANY_LOCATION_WRAPPER (t);
10434
10435 if (TREE_CODE (t) == INTEGER_CST)
10436 return t;
10437
10438 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10439 {
10440 t = uniform_vector_p (t);
10441 if (t && TREE_CODE (t) == INTEGER_CST)
10442 return t;
10443 }
10444
10445 return NULL_TREE;
10446 }
10447
10448 /* Checks to see if T is a constant or a constant vector and if each element E
10449 adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE. */
10450
tree
bitmask_inv_cst_vector_p (tree t)
{

  tree_code code = TREE_CODE (t);
  tree type = TREE_TYPE (t);

  if (!INTEGRAL_TYPE_P (type)
      && !VECTOR_INTEGER_TYPE_P (type))
    return NULL_TREE;

  unsigned HOST_WIDE_INT nelts = 1;
  tree cst;
  unsigned int idx = 0;
  /* Nonzero when T is a scalar or a uniform vector; then the first
     element determines the whole result and build_uniform_cst is used.  */
  bool uniform = uniform_integer_cst_p (t);
  tree newtype = unsigned_type_for (type);
  tree_vector_builder builder;
  if (code == INTEGER_CST)
    cst = t;
  else
    {
      if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
	return NULL_TREE;

      cst = vector_cst_elt (t, 0);
      builder.new_vector (newtype, nelts, 1);
    }

  /* The inverted elements are built in the corresponding unsigned
     element type.  */
  tree ty = unsigned_type_for (TREE_TYPE (cst));

  do
    {
      if (idx > 0)
	cst = vector_cst_elt (t, idx);
      wide_int icst = wi::to_wide (cst);
      wide_int inv = wi::bit_not (icst);
      /* Require ~E + 1 to be a power of two, i.e. E must be the
	 bitwise inverse of a low bitmask.  */
      icst = wi::add (1, inv);
      if (wi::popcount (icst) != 1)
	return NULL_TREE;

      tree newcst = wide_int_to_tree (ty, inv);

      if (uniform)
	return build_uniform_cst (newtype, newcst);

      builder.quick_push (newcst);
    }
  while (++idx < nelts);

  return builder.build ();
}
10502
10503 /* If VECTOR_CST T has a single nonzero element, return the index of that
10504 element, otherwise return -1. */
10505
int
single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      /* Variable-length vector with two elements per pattern: only the
	 encoded elements need checking, but a nonzero element is only
	 acceptable in the leading (non-repeated) part, i.e. within the
	 first NPATTERNS elements.  */
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* Fail on a second nonzero element, or on a nonzero element
	     in the implicitly repeated tail.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
10534
10535 /* Build an empty statement at location LOC. */
10536
10537 tree
10538 build_empty_stmt (location_t loc)
10539 {
10540 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10541 SET_EXPR_LOCATION (t, loc);
10542 return t;
10543 }
10544
10545
10546 /* Build an OMP clause with code CODE. LOC is the location of the
10547 clause. */
10548
10549 tree
10550 build_omp_clause (location_t loc, enum omp_clause_code code)
10551 {
10552 tree t;
10553 int size, length;
10554
10555 length = omp_clause_num_ops[code];
10556 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10557
10558 record_node_allocation_statistics (OMP_CLAUSE, size);
10559
10560 t = (tree) ggc_internal_alloc (size);
10561 memset (t, 0, size);
10562 TREE_SET_CODE (t, OMP_CLAUSE);
10563 OMP_CLAUSE_SET_CODE (t, code);
10564 OMP_CLAUSE_LOCATION (t) = loc;
10565
10566 return t;
10567 }
10568
10569 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10570 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10571 Except for the CODE and operand count field, other storage for the
10572 object is initialized to zeros. */
10573
tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_exp already provides room for one operand; allocate
     space for the remaining LEN - 1.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10595
10596 /* Helper function for build_call_* functions; build a CALL_EXPR with
10597 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10598 the argument slots. */
10599
10600 static tree
10601 build_call_1 (tree return_type, tree fn, int nargs)
10602 {
10603 tree t;
10604
10605 t = build_vl_exp (CALL_EXPR, nargs + 3);
10606 TREE_TYPE (t) = return_type;
10607 CALL_EXPR_FN (t) = fn;
10608 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10609
10610 return t;
10611 }
10612
10613 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10614 FN and a null static chain slot. NARGS is the number of call arguments
10615 which are specified as "..." arguments. */
10616
10617 tree
10618 build_call_nary (tree return_type, tree fn, int nargs, ...)
10619 {
10620 tree ret;
10621 va_list args;
10622 va_start (args, nargs);
10623 ret = build_call_valist (return_type, fn, nargs, args);
10624 va_end (args);
10625 return ret;
10626 }
10627
10628 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10629 FN and a null static chain slot. NARGS is the number of call arguments
10630 which are specified as a va_list ARGS. */
10631
10632 tree
10633 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10634 {
10635 tree t;
10636 int i;
10637
10638 t = build_call_1 (return_type, fn, nargs);
10639 for (i = 0; i < nargs; i++)
10640 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10641 process_call_operands (t);
10642 return t;
10643 }
10644
10645 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10646 FN and a null static chain slot. NARGS is the number of call arguments
10647 which are specified as a tree array ARGS. */
10648
10649 tree
10650 build_call_array_loc (location_t loc, tree return_type, tree fn,
10651 int nargs, const tree *args)
10652 {
10653 tree t;
10654 int i;
10655
10656 t = build_call_1 (return_type, fn, nargs);
10657 for (i = 0; i < nargs; i++)
10658 CALL_EXPR_ARG (t, i) = args[i];
10659 process_call_operands (t);
10660 SET_EXPR_LOCATION (t, loc);
10661 return t;
10662 }
10663
10664 /* Like build_call_array, but takes a vec. */
10665
10666 tree
10667 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10668 {
10669 tree ret, t;
10670 unsigned int ix;
10671
10672 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10673 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10674 CALL_EXPR_ARG (ret, ix) = t;
10675 process_call_operands (ret);
10676 return ret;
10677 }
10678
10679 /* Conveniently construct a function call expression. FNDECL names the
10680 function to be called and N arguments are passed in the array
10681 ARGARRAY. */
10682
10683 tree
10684 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10685 {
10686 tree fntype = TREE_TYPE (fndecl);
10687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10688
10689 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10690 }
10691
10692 /* Conveniently construct a function call expression. FNDECL names the
10693 function to be called and the arguments are passed in the vector
10694 VEC. */
10695
10696 tree
10697 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10698 {
10699 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10700 vec_safe_address (vec));
10701 }
10702
10703
10704 /* Conveniently construct a function call expression. FNDECL names the
10705 function to be called, N is the number of arguments, and the "..."
10706 parameters are the argument expressions. */
10707
10708 tree
10709 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10710 {
10711 va_list ap;
10712 tree *argarray = XALLOCAVEC (tree, n);
10713 int i;
10714
10715 va_start (ap, n);
10716 for (i = 0; i < n; i++)
10717 argarray[i] = va_arg (ap, tree);
10718 va_end (ap);
10719 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10720 }
10721
10722 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10723 varargs macros aren't supported by all bootstrap compilers. */
10724
10725 tree
10726 build_call_expr (tree fndecl, int n, ...)
10727 {
10728 va_list ap;
10729 tree *argarray = XALLOCAVEC (tree, n);
10730 int i;
10731
10732 va_start (ap, n);
10733 for (i = 0; i < n; i++)
10734 argarray[i] = va_arg (ap, tree);
10735 va_end (ap);
10736 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10737 }
10738
10739 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10740 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10741 It will get gimplified later into an ordinary internal function. */
10742
10743 tree
10744 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10745 tree type, int n, const tree *args)
10746 {
10747 tree t = build_call_1 (type, NULL_TREE, n);
10748 for (int i = 0; i < n; ++i)
10749 CALL_EXPR_ARG (t, i) = args[i];
10750 SET_EXPR_LOCATION (t, loc);
10751 CALL_EXPR_IFN (t) = ifn;
10752 process_call_operands (t);
10753 return t;
10754 }
10755
10756 /* Build internal call expression. This is just like CALL_EXPR, except
10757 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10758 internal function. */
10759
10760 tree
10761 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10762 tree type, int n, ...)
10763 {
10764 va_list ap;
10765 tree *argarray = XALLOCAVEC (tree, n);
10766 int i;
10767
10768 va_start (ap, n);
10769 for (i = 0; i < n; i++)
10770 argarray[i] = va_arg (ap, tree);
10771 va_end (ap);
10772 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10773 }
10774
10775 /* Return a function call to FN, if the target is guaranteed to support it,
10776 or null otherwise.
10777
10778 N is the number of arguments, passed in the "...", and TYPE is the
10779 type of the return value. */
10780
tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
			   int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
	{
	  /* A directly-mapped internal function is only usable when the
	     target supports it for these argument/return types.  */
	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
	  if (!direct_internal_fn_supported_p (ifn, types,
					       OPTIMIZE_FOR_BOTH))
	    return NULL_TREE;
	}
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      /* Fail if there is no implicit declaration for the built-in.  */
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
	return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
10813
10814 /* Return a function call to the appropriate builtin alloca variant.
10815
10816 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10817 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10818 bound for SIZE in case it is not a fixed value. */
10819
10820 tree
10821 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10822 {
10823 if (max_size >= 0)
10824 {
10825 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10826 return
10827 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10828 }
10829 else if (align > 0)
10830 {
10831 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10832 return build_call_expr (t, 2, size, size_int (align));
10833 }
10834 else
10835 {
10836 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10837 return build_call_expr (t, 1, size);
10838 }
10839 }
10840
10841 /* The built-in decl to use to mark code points believed to be unreachable.
10842 Typically __builtin_unreachable, but __builtin_trap if
10843 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10844 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10845 appropriate ubsan function. When building a call directly, use
10846 {gimple_},build_builtin_unreachable instead. */
10847
10848 tree
10849 builtin_decl_unreachable ()
10850 {
10851 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10852
10853 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10854 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10855 : flag_unreachable_traps)
10856 fncode = BUILT_IN_TRAP;
10857 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10858 in the sanopt pass. */
10859
10860 return builtin_decl_explicit (fncode);
10861 }
10862
10863 /* Build a call to __builtin_unreachable, possibly rewritten by
10864 -fsanitize=unreachable. Use this rather than the above when practical. */
10865
10866 tree
10867 build_builtin_unreachable (location_t loc)
10868 {
10869 tree data = NULL_TREE;
10870 tree fn = sanitize_unreachable_fn (&data, loc);
10871 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10872 }
10873
10874 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10875 if SIZE == -1) and return a tree node representing char* pointer to
10876 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10877 the STRING_CST value is the LEN bytes at STR (the representation
10878 of the string, which may be wide). Otherwise it's all zeros. */
10879
tree
build_string_literal (unsigned len, const char *str /* = NULL */,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  /* The array's element type is const-qualified.  */
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &literal[0], typed as a pointer to the const element type.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
10904
10905
10906
10907 /* Return true if T (assumed to be a DECL) must be assigned a memory
10908 location. */
10909
10910 bool
10911 needs_to_live_in_memory (const_tree t)
10912 {
10913 return (TREE_ADDRESSABLE (t)
10914 || is_global_var (t)
10915 || (TREE_CODE (t) == RESULT_DECL
10916 && !DECL_BY_REFERENCE (t)
10917 && aggregate_value_p (t, current_function_decl)));
10918 }
10919
10920 /* Return value of a constant X and sign-extend it. */
10921
HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from bit BITS-1: set all bits above it when that
	 bit is one, clear them when it is zero.  The two-step shift is
	 equivalent to shifting left by BITS.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
10942
10943 /* If TYPE is an integral or pointer type, return an integer type with
10944 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10945 if TYPE is already an integer type of signedness UNSIGNEDP.
10946 If TYPE is a floating-point type, return an integer type with the same
10947 bitsize and with the signedness given by UNSIGNEDP; this is useful
10948 when doing bit-level operations on a floating-point value. */
10949
tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  /* Already an integer type of the requested signedness.  */
  if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* For vectors, convert the element type and rebuild the vector.  */
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
	return NULL_TREE;
      if (inner == inner2)
	return type;
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  /* Likewise for complex types.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
	return NULL_TREE;
      if (inner == inner2)
	return type;
      return build_complex_type (inner2);
    }

  /* For scalars, pick the precision: the type's own precision for
     integral/pointer/offset types, the mode bitsize for floats.  */
  unsigned int bits;
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    bits = TYPE_PRECISION (type);
  else if (TREE_CODE (type) == REAL_TYPE)
    bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
  else
    return NULL_TREE;

  return build_nonstandard_integer_type (bits, unsignedp);
}
10990
10991 /* If TYPE is an integral or pointer type, return an integer type with
10992 the same precision which is unsigned, or itself if TYPE is already an
10993 unsigned integer type. If TYPE is a floating-point type, return an
10994 unsigned integer type with the same bitsize as TYPE. */
10995
10996 tree
10997 unsigned_type_for (tree type)
10998 {
10999 return signed_or_unsigned_type_for (1, type);
11000 }
11001
11002 /* If TYPE is an integral or pointer type, return an integer type with
11003 the same precision which is signed, or itself if TYPE is already a
11004 signed integer type. If TYPE is a floating-point type, return a
11005 signed integer type with the same bitsize as TYPE. */
11006
11007 tree
11008 signed_type_for (tree type)
11009 {
11010 return signed_or_unsigned_type_for (0, type);
11011 }
11012
/* - For VECTOR_TYPEs:
   - The truth type must be a VECTOR_BOOLEAN_TYPE.
   - The number of elements must match (known_eq).
   - targetm.vectorize.get_mask_mode must exist and return exactly
     the same mode as the truth type's mode.
   - Otherwise, the truth type must be a BOOLEAN_TYPE
     or useless_type_conversion_p to BOOLEAN_TYPE.  */
bool
is_truth_type_for (tree type, tree truth_type)
{
  machine_mode mask_mode = TYPE_MODE (truth_type);
  machine_mode vmode = TYPE_MODE (type);
  machine_mode tmask_mode;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      /* All three vector conditions above must hold.  */
      if (VECTOR_BOOLEAN_TYPE_P (truth_type)
	  && known_eq (TYPE_VECTOR_SUBPARTS (type),
		       TYPE_VECTOR_SUBPARTS (truth_type))
	  && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
	  && tmask_mode == mask_mode)
	return true;

      return false;
    }

  return useless_type_conversion_p (boolean_type_node, truth_type);
}
11041
11042 /* If TYPE is a vector type, return a signed integer vector type with the
11043 same width and number of subparts. Otherwise return boolean_type_node. */
11044
11045 tree
11046 truth_type_for (tree type)
11047 {
11048 if (TREE_CODE (type) == VECTOR_TYPE)
11049 {
11050 if (VECTOR_BOOLEAN_TYPE_P (type))
11051 return type;
11052 return build_truth_vector_type_for (type);
11053 }
11054 else
11055 return boolean_type_node;
11056 }
11057
11058 /* Returns the largest value obtainable by casting something in INNER type to
11059 OUTER type. */
11060
tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination.
     Bit 2: OPREC > IPREC; bit 1: OUTER unsigned; bit 0: INNER unsigned.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* The bound is 2^PREC - 1, i.e. a mask of PREC low bits, represented
     in OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
11110
11111 /* Returns the smallest value obtainable by casting something in INNER type to
11112 OUTER type. */
11113
tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      /* -2^(PREC-1) is a mask of ones from bit PREC-1 upwards,
	 represented in OUTER's precision.  */
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
11140
11141 /* Return nonzero if two operands that are suitable for PHI nodes are
11142 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11143 SSA_NAME or invariant. Note that this is strictly an optimization.
11144 That is, callers of this function can directly call operand_equal_p
11145 and get the same result, only slower. */
11146
11147 int
11148 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11149 {
11150 if (arg0 == arg1)
11151 return 1;
11152 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11153 return 0;
11154 return operand_equal_p (arg0, arg1, 0);
11155 }
11156
11157 /* Returns number of zeros at the end of binary representation of X. */
11158
11159 tree
11160 num_ending_zeros (const_tree x)
11161 {
11162 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11163 }
11164
11165
/* Walk the sub-tree NODE and return early from the enclosing function
   if the walk found something.  Relies on the local names RESULT, FUNC,
   DATA, PSET and LH being in scope at the expansion site.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
11174
/* This is a subroutine of walk_tree that walks the fields of TYPE that are
   to be walked whenever a type is seen in the tree.  The rest of the
   operands and the return value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  /* walk_tree_without_duplicates allocates its own pset, which
	     breaks any pointer cycle.  */
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11254
11255 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11256 called with the DATA and the address of each sub-tree. If FUNC returns a
11257 non-NULL value, the traversal is stopped, and the value returned by FUNC
11258 is returned. If PSET is non-NULL it is used to record the nodes visited,
11259 and to avoid visiting a node more than once. */
11260
11261 tree
11262 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11263 hash_set<tree> *pset, walk_tree_lh lh)
11264 {
11265 enum tree_code code;
11266 int walk_subtrees;
11267 tree result;
11268
11269 #define WALK_SUBTREE_TAIL(NODE) \
11270 do \
11271 { \
11272 tp = & (NODE); \
11273 goto tail_recurse; \
11274 } \
11275 while (0)
11276
11277 tail_recurse:
11278 /* Skip empty subtrees. */
11279 if (!*tp)
11280 return NULL_TREE;
11281
11282 /* Don't walk the same tree twice, if the user has requested
11283 that we avoid doing so. */
11284 if (pset && pset->add (*tp))
11285 return NULL_TREE;
11286
11287 /* Call the function. */
11288 walk_subtrees = 1;
11289 result = (*func) (tp, &walk_subtrees, data);
11290
11291 /* If we found something, return it. */
11292 if (result)
11293 return result;
11294
11295 code = TREE_CODE (*tp);
11296
11297 /* Even if we didn't, FUNC may have decided that there was nothing
11298 interesting below this point in the tree. */
11299 if (!walk_subtrees)
11300 {
11301 /* But we still need to check our siblings. */
11302 if (code == TREE_LIST)
11303 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11304 else if (code == OMP_CLAUSE)
11305 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11306 else
11307 return NULL_TREE;
11308 }
11309
11310 if (lh)
11311 {
11312 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11313 if (result || !walk_subtrees)
11314 return result;
11315 }
11316
11317 switch (code)
11318 {
11319 case ERROR_MARK:
11320 case IDENTIFIER_NODE:
11321 case INTEGER_CST:
11322 case REAL_CST:
11323 case FIXED_CST:
11324 case STRING_CST:
11325 case BLOCK:
11326 case PLACEHOLDER_EXPR:
11327 case SSA_NAME:
11328 case FIELD_DECL:
11329 case RESULT_DECL:
11330 /* None of these have subtrees other than those already walked
11331 above. */
11332 break;
11333
11334 case TREE_LIST:
11335 WALK_SUBTREE (TREE_VALUE (*tp));
11336 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11337
11338 case TREE_VEC:
11339 {
11340 int len = TREE_VEC_LENGTH (*tp);
11341
11342 if (len == 0)
11343 break;
11344
11345 /* Walk all elements but the last. */
11346 for (int i = 0; i < len - 1; ++i)
11347 WALK_SUBTREE (TREE_VEC_ELT (*tp, i));
11348
11349 /* Now walk the last one as a tail call. */
11350 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, len - 1));
11351 }
11352
11353 case VECTOR_CST:
11354 {
11355 unsigned len = vector_cst_encoded_nelts (*tp);
11356 if (len == 0)
11357 break;
11358 /* Walk all elements but the last. */
11359 for (unsigned i = 0; i < len - 1; ++i)
11360 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, i));
11361 /* Now walk the last one as a tail call. */
11362 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, len - 1));
11363 }
11364
11365 case COMPLEX_CST:
11366 WALK_SUBTREE (TREE_REALPART (*tp));
11367 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11368
11369 case CONSTRUCTOR:
11370 {
11371 unsigned HOST_WIDE_INT idx;
11372 constructor_elt *ce;
11373
11374 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11375 idx++)
11376 WALK_SUBTREE (ce->value);
11377 }
11378 break;
11379
11380 case SAVE_EXPR:
11381 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11382
11383 case BIND_EXPR:
11384 {
11385 tree decl;
11386 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11387 {
11388 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11389 into declarations that are just mentioned, rather than
11390 declared; they don't really belong to this part of the tree.
11391 And, we can see cycles: the initializer for a declaration
11392 can refer to the declaration itself. */
11393 WALK_SUBTREE (DECL_INITIAL (decl));
11394 WALK_SUBTREE (DECL_SIZE (decl));
11395 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11396 }
11397 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11398 }
11399
11400 case STATEMENT_LIST:
11401 {
11402 tree_stmt_iterator i;
11403 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11404 WALK_SUBTREE (*tsi_stmt_ptr (i));
11405 }
11406 break;
11407
11408 case OMP_CLAUSE:
11409 {
11410 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11411 for (int i = 0; i < len; i++)
11412 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11413 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11414 }
11415
11416 case TARGET_EXPR:
11417 {
11418 int i, len;
11419
11420 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11421 But, we only want to walk once. */
11422 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11423 for (i = 0; i < len; ++i)
11424 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11425 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11426 }
11427
11428 case DECL_EXPR:
11429 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11430 defining. We only want to walk into these fields of a type in this
11431 case and not in the general case of a mere reference to the type.
11432
11433 The criterion is as follows: if the field can be an expression, it
11434 must be walked only here. This should be in keeping with the fields
11435 that are directly gimplified in gimplify_type_sizes in order for the
11436 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11437 variable-sized types.
11438
11439 Note that DECLs get walked as part of processing the BIND_EXPR. */
11440 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11441 {
11442 /* Call the function for the decl so e.g. copy_tree_body_r can
11443 replace it with the remapped one. */
11444 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11445 if (result || !walk_subtrees)
11446 return result;
11447
11448 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11449 if (TREE_CODE (*type_p) == ERROR_MARK)
11450 return NULL_TREE;
11451
11452 /* Call the function for the type. See if it returns anything or
11453 doesn't want us to continue. If we are to continue, walk both
11454 the normal fields and those for the declaration case. */
11455 result = (*func) (type_p, &walk_subtrees, data);
11456 if (result || !walk_subtrees)
11457 return result;
11458
11459 /* But do not walk a pointed-to type since it may itself need to
11460 be walked in the declaration case if it isn't anonymous. */
11461 if (!POINTER_TYPE_P (*type_p))
11462 {
11463 result = walk_type_fields (*type_p, func, data, pset, lh);
11464 if (result)
11465 return result;
11466 }
11467
11468 /* If this is a record type, also walk the fields. */
11469 if (RECORD_OR_UNION_TYPE_P (*type_p))
11470 {
11471 tree field;
11472
11473 for (field = TYPE_FIELDS (*type_p); field;
11474 field = DECL_CHAIN (field))
11475 {
11476 /* We'd like to look at the type of the field, but we can
11477 easily get infinite recursion. So assume it's pointed
11478 to elsewhere in the tree. Also, ignore things that
11479 aren't fields. */
11480 if (TREE_CODE (field) != FIELD_DECL)
11481 continue;
11482
11483 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11484 WALK_SUBTREE (DECL_SIZE (field));
11485 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11486 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11487 WALK_SUBTREE (DECL_QUALIFIER (field));
11488 }
11489 }
11490
11491 /* Same for scalar types. */
11492 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11493 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11494 || TREE_CODE (*type_p) == INTEGER_TYPE
11495 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11496 || TREE_CODE (*type_p) == REAL_TYPE)
11497 {
11498 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11499 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11500 }
11501
11502 WALK_SUBTREE (TYPE_SIZE (*type_p));
11503 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11504 }
11505 /* FALLTHRU */
11506
11507 default:
11508 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11509 {
11510 int i, len;
11511
11512 /* Walk over all the sub-trees of this operand. */
11513 len = TREE_OPERAND_LENGTH (*tp);
11514
11515 /* Go through the subtrees. We need to do this in forward order so
11516 that the scope of a FOR_EXPR is handled properly. */
11517 if (len)
11518 {
11519 for (i = 0; i < len - 1; ++i)
11520 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11521 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11522 }
11523 }
11524 /* If this is a type, walk the needed fields in the type. */
11525 else if (TYPE_P (*tp))
11526 return walk_type_fields (*tp, func, data, pset, lh);
11527 break;
11528 }
11529
11530 /* We didn't find what we were looking for. */
11531 return NULL_TREE;
11532
11533 #undef WALK_SUBTREE_TAIL
11534 }
11535 #undef WALK_SUBTREE
11536
11537 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11538
11539 tree
11540 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11541 walk_tree_lh lh)
11542 {
11543 tree result;
11544
11545 hash_set<tree> pset;
11546 result = walk_tree_1 (tp, func, data, &pset, lh);
11547 return result;
11548 }
11549
11550
11551 tree
11552 tree_block (tree t)
11553 {
11554 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11555
11556 if (IS_EXPR_CODE_CLASS (c))
11557 return LOCATION_BLOCK (t->exp.locus);
11558 gcc_unreachable ();
11559 return NULL;
11560 }
11561
11562 void
11563 tree_set_block (tree t, tree b)
11564 {
11565 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11566
11567 if (IS_EXPR_CODE_CLASS (c))
11568 {
11569 t->exp.locus = set_block (t->exp.locus, b);
11570 }
11571 else
11572 gcc_unreachable ();
11573 }
11574
11575 /* Create a nameless artificial label and put it in the current
11576 function context. The label has a location of LOC. Returns the
11577 newly created label. */
11578
11579 tree
11580 create_artificial_label (location_t loc)
11581 {
11582 tree lab = build_decl (loc,
11583 LABEL_DECL, NULL_TREE, void_type_node);
11584
11585 DECL_ARTIFICIAL (lab) = 1;
11586 DECL_IGNORED_P (lab) = 1;
11587 DECL_CONTEXT (lab) = current_function_decl;
11588 return lab;
11589 }
11590
11591 /* Given a tree, try to return a useful variable name that we can use
11592 to prefix a temporary that is being assigned the value of the tree.
11593 I.E. given <temp> = &A, return A. */
11594
11595 const char *
11596 get_name (tree t)
11597 {
11598 tree stripped_decl;
11599
11600 stripped_decl = t;
11601 STRIP_NOPS (stripped_decl);
11602 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11603 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11604 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11605 {
11606 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11607 if (!name)
11608 return NULL;
11609 return IDENTIFIER_POINTER (name);
11610 }
11611 else
11612 {
11613 switch (TREE_CODE (stripped_decl))
11614 {
11615 case ADDR_EXPR:
11616 return get_name (TREE_OPERAND (stripped_decl, 0));
11617 default:
11618 return NULL;
11619 }
11620 }
11621 }
11622
11623 /* Return true if TYPE has a variable argument list. */
11624
11625 bool
11626 stdarg_p (const_tree fntype)
11627 {
11628 function_args_iterator args_iter;
11629 tree n = NULL_TREE, t;
11630
11631 if (!fntype)
11632 return false;
11633
11634 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11635 return true;
11636
11637 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11638 {
11639 n = t;
11640 }
11641
11642 return n != NULL_TREE && n != void_type_node;
11643 }
11644
11645 /* Return true if TYPE has a prototype. */
11646
11647 bool
11648 prototype_p (const_tree fntype)
11649 {
11650 tree t;
11651
11652 gcc_assert (fntype != NULL_TREE);
11653
11654 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11655 return true;
11656
11657 t = TYPE_ARG_TYPES (fntype);
11658 return (t != NULL_TREE);
11659 }
11660
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called, otherwise return NULL.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk up through the chain of inlined scopes; each BLOCK with a
     FUNCTION_DECL abstract origin corresponds to one inlining.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
11692
11693
11694 /* If EXP is inlined from an __attribute__((__artificial__))
11695 function, return the location of the original call expression. */
11696
11697 location_t
11698 tree_nonartificial_location (tree exp)
11699 {
11700 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11701
11702 if (loc)
11703 return *loc;
11704 else
11705 return EXPR_LOCATION (exp);
11706 }
11707
/* Return the location into which EXP has been inlined.  Analogous
   to tree_nonartificial_location() above but not limited to artificial
   functions declared inline.  If SYSTEM_HEADER is true, return
   the macro expansion point of the location if it's in a system header.  */

location_t
tree_inlined_location (tree exp, bool system_header /* = true */)
{
  location_t loc = UNKNOWN_LOCATION;

  tree block = TREE_BLOCK (exp);

  /* Walk outward through inlined scopes, remembering the outermost
     call-site location found.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	loc = BLOCK_SOURCE_LOCATION (block);
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }

  if (loc == UNKNOWN_LOCATION)
    {
      loc = EXPR_LOCATION (exp);
      if (system_header)
	/* Only consider macro expansion when the block traversal failed
	   to find a location.  Otherwise it's not relevant.  */
	return expansion_point_location_if_in_system_header (loc);
    }

  return loc;
}
11743
11744 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11745 nodes. */
11746
11747 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11748
11749 hashval_t
11750 cl_option_hasher::hash (tree x)
11751 {
11752 const_tree const t = x;
11753
11754 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11755 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11756 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11757 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11758 else
11759 gcc_unreachable ();
11760 }
11761
11762 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11763 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11764 same. */
11765
11766 bool
11767 cl_option_hasher::equal (tree x, tree y)
11768 {
11769 const_tree const xt = x;
11770 const_tree const yt = y;
11771
11772 if (TREE_CODE (xt) != TREE_CODE (yt))
11773 return 0;
11774
11775 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11776 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11777 TREE_OPTIMIZATION (yt));
11778 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11779 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11780 TREE_TARGET_OPTION (yt));
11781 else
11782 gcc_unreachable ();
11783 }
11784
11785 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11786
11787 tree
11788 build_optimization_node (struct gcc_options *opts,
11789 struct gcc_options *opts_set)
11790 {
11791 tree t;
11792
11793 /* Use the cache of optimization nodes. */
11794
11795 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11796 opts, opts_set);
11797
11798 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11799 t = *slot;
11800 if (!t)
11801 {
11802 /* Insert this one into the hash table. */
11803 t = cl_optimization_node;
11804 *slot = t;
11805
11806 /* Make a new node for next time round. */
11807 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11808 }
11809
11810 return t;
11811 }
11812
11813 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11814
11815 tree
11816 build_target_option_node (struct gcc_options *opts,
11817 struct gcc_options *opts_set)
11818 {
11819 tree t;
11820
11821 /* Use the cache of optimization nodes. */
11822
11823 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11824 opts, opts_set);
11825
11826 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11827 t = *slot;
11828 if (!t)
11829 {
11830 /* Insert this one into the hash table. */
11831 t = cl_target_option_node;
11832 *slot = t;
11833
11834 /* Make a new node for next time round. */
11835 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11836 }
11837
11838 return t;
11839 }
11840
11841 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11842 so that they aren't saved during PCH writing. */
11843
11844 void
11845 prepare_target_option_nodes_for_pch (void)
11846 {
11847 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11848 for (; iter != cl_option_hash_table->end (); ++iter)
11849 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11850 TREE_TARGET_GLOBALS (*iter) = NULL;
11851 }
11852
11853 /* Determine the "ultimate origin" of a block. */
11854
11855 tree
11856 block_ultimate_origin (const_tree block)
11857 {
11858 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11859
11860 if (origin == NULL_TREE)
11861 return NULL_TREE;
11862 else
11863 {
11864 gcc_checking_assert ((DECL_P (origin)
11865 && DECL_ORIGIN (origin) == origin)
11866 || BLOCK_ORIGIN (origin) == origin);
11867 return origin;
11868 }
11869 }
11870
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
11908
11909 /* Return true iff conversion in EXP generates no instruction. Mark
11910 it inline so that we fully inline into the stripping functions even
11911 though we have two uses of this function. */
11912
11913 static inline bool
11914 tree_nop_conversion (const_tree exp)
11915 {
11916 tree outer_type, inner_type;
11917
11918 if (location_wrapper_p (exp))
11919 return true;
11920 if (!CONVERT_EXPR_P (exp)
11921 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11922 return false;
11923
11924 outer_type = TREE_TYPE (exp);
11925 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11926 if (!inner_type || inner_type == error_mark_node)
11927 return false;
11928
11929 return tree_nop_conversion_p (outer_type, inner_type);
11930 }
11931
11932 /* Return true iff conversion in EXP generates no instruction. Don't
11933 consider conversions changing the signedness. */
11934
11935 static bool
11936 tree_sign_nop_conversion (const_tree exp)
11937 {
11938 tree outer_type, inner_type;
11939
11940 if (!tree_nop_conversion (exp))
11941 return false;
11942
11943 outer_type = TREE_TYPE (exp);
11944 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11945
11946 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11947 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11948 }
11949
11950 /* Strip conversions from EXP according to tree_nop_conversion and
11951 return the resulting expression. */
11952
11953 tree
11954 tree_strip_nop_conversions (tree exp)
11955 {
11956 while (tree_nop_conversion (exp))
11957 exp = TREE_OPERAND (exp, 0);
11958 return exp;
11959 }
11960
11961 /* Strip conversions from EXP according to tree_sign_nop_conversion
11962 and return the resulting expression. */
11963
11964 tree
11965 tree_strip_sign_nop_conversions (tree exp)
11966 {
11967 while (tree_sign_nop_conversion (exp))
11968 exp = TREE_OPERAND (exp, 0);
11969 return exp;
11970 }
11971
/* Avoid any floating point extensions from EXP.  Returns the narrowest
   expression whose value, once re-extended, equals EXP.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double, requiring that the truncation
	 be exact so no value is lost.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  /* Only look through conversions from a narrower floating type of the
     same (binary vs. decimal) family.  */
  if (!FLOAT_TYPE_P (subt))
    return exp;

  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  return strip_float_extensions (sub);
}
12017
12018 /* Strip out all handled components that produce invariant
12019 offsets. */
12020
12021 const_tree
12022 strip_invariant_refs (const_tree op)
12023 {
12024 while (handled_component_p (op))
12025 {
12026 switch (TREE_CODE (op))
12027 {
12028 case ARRAY_REF:
12029 case ARRAY_RANGE_REF:
12030 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12031 || TREE_OPERAND (op, 2) != NULL_TREE
12032 || TREE_OPERAND (op, 3) != NULL_TREE)
12033 return NULL;
12034 break;
12035
12036 case COMPONENT_REF:
12037 if (TREE_OPERAND (op, 2) != NULL_TREE)
12038 return NULL;
12039 break;
12040
12041 default:;
12042 }
12043 op = TREE_OPERAND (op, 0);
12044 }
12045
12046 return op;
12047 }
12048
12049 /* Strip handled components with zero offset from OP. */
12050
12051 tree
12052 strip_zero_offset_components (tree op)
12053 {
12054 while (TREE_CODE (op) == COMPONENT_REF
12055 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12056 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12057 op = TREE_OPERAND (op, 0);
12058 return op;
12059 }
12060
12061 static GTY(()) tree gcc_eh_personality_decl;
12062
12063 /* Return the GCC personality function decl. */
12064
12065 tree
12066 lhd_gcc_personality (void)
12067 {
12068 if (!gcc_eh_personality_decl)
12069 gcc_eh_personality_decl = build_personality_function ("gcc");
12070 return gcc_eh_personality_decl;
12071 }
12072
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing an virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  FOR_DUMP_P is true when being called from
   the dump routines.  */

bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* An OBJ_TYPE_REF target is a pointer to the callee type.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* A plain FUNCTION_TYPE indicates an objc-style call, not a C++
     method call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
12099
/* Lookup sub-BINFO of BINFO of TYPE at offset POS.  Searches the base
   hierarchy recursively; returns NULL when no matching base is found.  */

static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Direct match: a base of TYPE at the requested offset.  */
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    /* Otherwise recurse into this base's own hierarchy.  */
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL;
}
12116
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  OFFSET is in bits.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  /* Descend one artificial field per iteration until TYPE matches
     EXPECTED_TYPE or descent becomes impossible.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial field (base subobject) containing OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* Prefer a direct base at the computed byte offset ...  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    /* ... otherwise search the whole base hierarchy.  */
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Recurse into the field's type with the offset rebased.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
12178
12179 /* Returns true if X is a typedef decl. */
12180
12181 bool
12182 is_typedef_decl (const_tree x)
12183 {
12184 return (x && TREE_CODE (x) == TYPE_DECL
12185 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12186 }
12187
12188 /* Returns true iff TYPE is a type variant created for a typedef. */
12189
12190 bool
12191 typedef_variant_p (const_tree type)
12192 {
12193 return is_typedef_decl (TYPE_NAME (type));
12194 }
12195
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any replacement string a previous call produced.  */
  if (m_owned)
    free (m_str);

  /* Start by aliasing the input; we only allocate if escaping turns
     out to be needed.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (!ISCNTRL (c))
	{
	  /* Ordinary character: copy only once we have switched to the
	     escaped buffer.  */
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Keep a literal newline when the diagnostics printer is wrapping
	 lines; escape every other control character.  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Worst case each byte expands to two,
		 plus the terminating NUL.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default:   escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      /* Terminate and take ownership of the replacement string.  */
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12269
/* Warn about a use of an identifier which was marked deprecated.  NODE is
   the DECL or TYPE that was used; ATTR, if non-NULL, is its attribute list
   (otherwise it is looked up from NODE).  Returns whether a warning was
   given.  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* If the caller did not supply the attribute list, find it on the
     decl, or on the type's stub decl (possibly via the main variant).  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
	  else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
		   != NULL_TREE)
	    {
	      node = TREE_TYPE (decl);
	      attr = TYPE_ATTRIBUTES (node);
	    }
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The optional attribute argument is the message; escape any control
     characters in it before printing.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* For a type, try to find something nameable to put in the
	 diagnostic: either an IDENTIFIER_NODE or a named TYPE_DECL.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
12358
/* Error out with an identifier which was marked 'unavailable'.  NODE is
   the DECL or TYPE that was used; ATTR, if non-NULL, is its attribute
   list (otherwise it is looked up from NODE).  Mirrors
   warn_deprecated_use but emits hard errors.  */
void
error_unavailable_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0)
    return;

  /* If the caller did not supply the attribute list, find it on the
     decl or the type's stub decl.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("unavailable",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("unavailable", attr);

  /* The optional attribute argument is the message; escape any control
     characters in it before printing.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	error ("%qD is unavailable: %s", node, (const char *) msg);
      else
	error ("%qD is unavailable", node);
      inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* For a type, try to find something nameable for the diagnostic.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    error ("%qE is unavailable: %s", what, (const char *) msg);
	  else
	    error ("%qE is unavailable", what);
	}
      else
	{
	  if (msg)
	    error ("type is unavailable: %s", (const char *) msg);
	  else
	    error ("type is unavailable");
	}

      if (decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }
}
12430
12431 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12432 somewhere in it. */
12433
12434 bool
12435 contains_bitfld_component_ref_p (const_tree ref)
12436 {
12437 while (handled_component_p (ref))
12438 {
12439 if (TREE_CODE (ref) == COMPONENT_REF
12440 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12441 return true;
12442 ref = TREE_OPERAND (ref, 0);
12443 }
12444
12445 return false;
12446 }
12447
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  STMT is the TRY_CATCH_EXPR;
   operand 0 is the try body and operand 1 the handler sequence.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Dispatch on the kind of handler the TRY_CATCH carries.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
12495
12496 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12497 need not be 100% accurate; simply be conservative and return true if we
12498 don't know. This is used only to avoid stupidly generating extra code.
12499 If we're wrong, we'll just delete the extra code later. */
12500
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* A null last statement is classified like ERROR_MARK below:
     conservatively assume the block may fall through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* A conditional may fall through if either arm may.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      /* Only the normal path (operand 0) matters for fallthrough.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment only affects fallthrough when its RHS is a call;
	 in that case classify the call itself below.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      /* Operand 1 of a TARGET_EXPR is its initializer.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      /* Be conservative.  */
      return true;

    default:
      /* Defer to the front end for language-specific tree codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
12574
/* True if we are using EH to handle cleanups.  Set once by a front end
   and never cleared.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
12592
12593 /* Wrapper for tree_code_name to ensure that tree code is valid */
12594 const char *
12595 get_tree_code_name (enum tree_code code)
12596 {
12597 const char *invalid = "<invalid tree code>";
12598
12599 /* The tree_code enum promotes to signed, but we could be getting
12600 invalid values, so force an unsigned comparison. */
12601 if (unsigned (code) >= MAX_TREE_CODES)
12602 {
12603 if ((unsigned)code == 0xa5a5)
12604 return "ggc_freed";
12605 return invalid;
12606 }
12607
12608 return tree_code_name[code];
12609 }
12610
12611 /* Drops the TREE_OVERFLOW flag from T. */
12612
tree
drop_tree_overflow (tree t)
{
  /* Callers must only pass nodes that actually have the flag set.  */
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  Only the encoded elements need
     processing; the builder regenerates the remainder from them.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
12656
12657 /* Given a memory reference expression T, return its base address.
12658 The base address of a memory reference expression is the main
12659 object being referenced. For instance, the base address for
12660 'array[i].fld[j]' is 'array'. You can think of this as stripping
12661 away the offset part from a memory address.
12662
12663 This function calls handled_component_p to strip away all the inner
12664 parts of the memory reference until it reaches the base object. */
12665
12666 tree
12667 get_base_address (tree t)
12668 {
12669 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12670 t = TREE_OPERAND (t, 0);
12671 while (handled_component_p (t))
12672 t = TREE_OPERAND (t, 0);
12673
12674 if ((TREE_CODE (t) == MEM_REF
12675 || TREE_CODE (t) == TARGET_MEM_REF)
12676 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12677 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12678
12679 return t;
12680 }
12681
12682 /* Return a tree of sizetype representing the size, in bytes, of the element
12683 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12684
12685 tree
12686 array_ref_element_size (tree exp)
12687 {
12688 tree aligned_size = TREE_OPERAND (exp, 3);
12689 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12690 location_t loc = EXPR_LOCATION (exp);
12691
12692 /* If a size was specified in the ARRAY_REF, it's the size measured
12693 in alignment units of the element type. So multiply by that value. */
12694 if (aligned_size)
12695 {
12696 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12697 sizetype from another type of the same width and signedness. */
12698 if (TREE_TYPE (aligned_size) != sizetype)
12699 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12700 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12701 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12702 }
12703
12704 /* Otherwise, take the size from that of the element type. Substitute
12705 any PLACEHOLDER_EXPR that we have. */
12706 else
12707 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12708 }
12709
12710 /* Return a tree representing the lower bound of the array mentioned in
12711 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12712
12713 tree
12714 array_ref_low_bound (tree exp)
12715 {
12716 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12717
12718 /* If a lower bound is specified in EXP, use it. */
12719 if (TREE_OPERAND (exp, 2))
12720 return TREE_OPERAND (exp, 2);
12721
12722 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12723 substituting for a PLACEHOLDER_EXPR as needed. */
12724 if (domain_type && TYPE_MIN_VALUE (domain_type))
12725 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12726
12727 /* Otherwise, return a zero of the appropriate type. */
12728 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12729 return (idxtype == error_mark_node
12730 ? integer_zero_node : build_int_cst (idxtype, 0));
12731 }
12732
12733 /* Return a tree representing the upper bound of the array mentioned in
12734 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12735
12736 tree
12737 array_ref_up_bound (tree exp)
12738 {
12739 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12740
12741 /* If there is a domain type and it has an upper bound, use it, substituting
12742 for a PLACEHOLDER_EXPR as needed. */
12743 if (domain_type && TYPE_MAX_VALUE (domain_type))
12744 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12745
12746 /* Otherwise fail. */
12747 return NULL_TREE;
12748 }
12749
12750 /* Returns true if REF is an array reference, a component reference,
12751 or a memory reference to an array whose actual size might be larger
than its upper bound implies; there are multiple cases:
12753 A. a ref to a flexible array member at the end of a structure;
12754 B. a ref to an array with a different type against the original decl;
12755 for example:
12756
12757 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 };
12758 (*((char(*)[16])&a[0]))[i+8]
12759
12760 C. a ref to an array that was passed as a parameter;
12761 for example:
12762
12763 int test (uint8_t *p, uint32_t t[1][1], int n) {
12764 for (int i = 0; i < 4; i++, p++)
12765 t[i][0] = ...;
12766
12767 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12768 */
12769
bool
array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
{
  /* The type of the array referenced by REF.  */
  tree atype = NULL_TREE;
  /* The FIELD_DECL for the array field in the containing structure.  */
  tree afield_decl = NULL_TREE;
  /* Whether this array is the trailing array of a structure.  Point
     IS_TRAILING_ARRAY at a local when the caller passed none, so the
     code below can store through it unconditionally.  */
  bool is_trailing_array_tmp = false;
  if (!is_trailing_array)
    is_trailing_array = &is_trailing_array_tmp;

  /* Classify REF, recording the array type and, when the array is a
     structure member, its FIELD_DECL.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 1));
      afield_decl = TREE_OPERAND (ref, 1);
    }
  else if (TREE_CODE (ref) == MEM_REF)
    {
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      /* A MEM_REF of a record only qualifies when the record's last
	 field is an array; a sized last field of a declared variable
	 is fully determined and therefore not flexible.  */
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      afield_decl = fld;
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  /* A string literal has a fixed size.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip non-FIELD_DECL entries in the chain when looking
		 for a following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as something else then what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  gcc_assert (!afield_decl
	      || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));

  /* The array now is at struct end.  Treat flexible array member as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    {
      *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
      return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  ref = get_base_address (ref);
  if (ref
      && DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* If the object itself is the array it is not at struct end.  */
      if (DECL_P (ref_to_array))
	return false;

      /* Check whether the array domain covers all of the available
	 padding.  */
      poly_int64 offset;
      /* Non-constant sizes or bounds mean the padding check below
	 cannot be performed; classify by the field alone.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	{
	  *is_trailing_array
	    = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
	  return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
	}
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	{
	  *is_trailing_array
	    = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
	  return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
	}

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	{
	  *is_trailing_array
	    = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
	  return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
	}

      return false;
    }

  *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
  return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
}
12923
12924
12925 /* Return a tree representing the offset, in bytes, of the field referenced
12926 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12927
12928 tree
12929 component_ref_field_offset (tree exp)
12930 {
12931 tree aligned_offset = TREE_OPERAND (exp, 2);
12932 tree field = TREE_OPERAND (exp, 1);
12933 location_t loc = EXPR_LOCATION (exp);
12934
12935 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12936 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12937 value. */
12938 if (aligned_offset)
12939 {
12940 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12941 sizetype from another type of the same width and signedness. */
12942 if (TREE_TYPE (aligned_offset) != sizetype)
12943 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12944 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12945 size_int (DECL_OFFSET_ALIGN (field)
12946 / BITS_PER_UNIT));
12947 }
12948
12949 /* Otherwise, take the offset from that of the field. Substitute
12950 any PLACEHOLDER_EXPR that we have. */
12951 else
12952 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12953 }
12954
12955 /* Given the initializer INIT, return the initializer for the field
12956 DECL if it exists, otherwise null. Used to obtain the initializer
12957 for a flexible array member and determine its size. */
12958
12959 static tree
12960 get_initializer_for (tree init, tree decl)
12961 {
12962 STRIP_NOPS (init);
12963
12964 tree fld, fld_init;
12965 unsigned HOST_WIDE_INT i;
12966 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12967 {
12968 if (decl == fld)
12969 return fld_init;
12970
12971 if (TREE_CODE (fld) == CONSTRUCTOR)
12972 {
12973 fld_init = get_initializer_for (fld_init, decl);
12974 if (fld_init)
12975 return fld_init;
12976 }
12977 }
12978
12979 return NULL_TREE;
12980 }
12981
/* Determines the special array member type for the array reference REF,
   i.e. whether it denotes an interior or trailing array that is
   zero-length, one-element, or longer.  */
special_array_member
component_ref_sam_type (tree ref)
{
  special_array_member sam_type = special_array_member::none;

  tree member = TREE_OPERAND (ref, 1);
  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      /* Only array members can be special.  */
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	return sam_type;

      bool trailing = false;
      /* Only the IS_TRAILING_ARRAY output is needed here.  */
      (void)array_ref_flexible_size_p (ref, &trailing);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is an interior array with
	   more than one element.  */
	return special_array_member::int_n;

      if (zero_length)
	{
	  if (trailing)
	    return special_array_member::trail_0;
	  else
	    return special_array_member::int_0;
	}

      /* Distinguish one-element trailing arrays from longer ones by
	 inspecting the array domain when it is constant.  */
      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  /* Number of elements minus one.  */
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is a trailing array with more than
		       one element.  */
		    return special_array_member::trail_n;

		  if (neltsm1 == 0)
		    return special_array_member::trail_1;
		}
    }

  return sam_type;
}
13034
13035 /* Determines the size of the member referenced by the COMPONENT_REF
13036 REF, using its initializer expression if necessary in order to
13037 determine the size of an initialized flexible array member.
13038 If non-null, set *SAM to the type of special array member.
13039 Returns the size as sizetype (which might be zero for an object
13040 with an uninitialized flexible array member) or null if the size
13041 cannot be determined. */
13042
tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* Point SAM at a local when the caller passed none, so it can be
     dereferenced unconditionally below.  */
  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = component_ref_sam_type (ref);

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      /* 2-or-more elements arrays are treated as normal arrays by default.  */
      if (*sam == special_array_member::int_n
	  || *sam == special_array_member::trail_n)
	return memsize;

      /* flag_strict_flex_arrays will control how to treat
	 the trailing arrays as flexible array members.  */

      tree afield_decl = TREE_OPERAND (ref, 1);
      unsigned int strict_flex_array_level
	= strict_flex_array_level_of (afield_decl);

      /* Each level includes all the stricter treatments of the levels
	 below it, hence the cascading fallthroughs.  */
      switch (strict_flex_array_level)
	{
	case 3:
	  /* Treating 0-length trailing arrays as normal array.  */
	  if (*sam == special_array_member::trail_0)
	    return size_zero_node;
	  /* FALLTHROUGH.  */
	case 2:
	  /* Treating 1-element trailing arrays as normal array.  */
	  if (*sam == special_array_member::trail_1)
	    return memsize;
	  /* FALLTHROUGH.  */
	case 1:
	  /* Treating 2-or-more elements trailing arrays as normal
	     array.  */
	  if (*sam == special_array_member::trail_n)
	    return memsize;
	  /* FALLTHROUGH.  */
	case 0:
	  break;
	default:
	  gcc_unreachable ();
	}

      if (*sam == special_array_member::int_0)
	memsize = NULL_TREE;

      /* For a reference to a flexible array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      /* Walk to the outermost enclosing object of the chain of
	 COMPONENT_REFs.  */
      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      if (!tree_fits_poly_int64_p (memsize))
	return NULL_TREE;
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
13234
13235 /* Return the machine mode of T. For vectors, returns the mode of the
13236 inner type. The main use case is to feed the result to HONOR_NANS,
13237 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13238
13239 machine_mode
13240 element_mode (const_tree t)
13241 {
13242 if (!TYPE_P (t))
13243 t = TREE_TYPE (t);
13244 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13245 t = TREE_TYPE (t);
13246 return TYPE_MODE (t);
13247 }
13248
13249 /* Vector types need to re-check the target flags each time we report
13250 the machine mode. We need to do this because attribute target can
13251 change the result of vector_mode_supported_p and have_regs_of_mode
13252 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13253 change on a per-function basis. */
13254 /* ??? Possibly a better solution is to run through all the types
13255 referenced by a function and re-compute the TYPE_MODE once, rather
13256 than make the TYPE_MODE macro call a function. */
13257
13258 machine_mode
13259 vector_type_mode (const_tree t)
13260 {
13261 machine_mode mode;
13262
13263 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13264
13265 mode = t->type_common.mode;
13266 if (VECTOR_MODE_P (mode)
13267 && (!targetm.vector_mode_supported_p (mode)
13268 || !have_regs_of_mode[mode]))
13269 {
13270 scalar_int_mode innermode;
13271
13272 /* For integers, try mapping it to a same-sized scalar mode. */
13273 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13274 {
13275 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13276 * GET_MODE_BITSIZE (innermode));
13277 scalar_int_mode mode;
13278 if (int_mode_for_size (size, 0).exists (&mode)
13279 && have_regs_of_mode[mode])
13280 return mode;
13281 }
13282
13283 return BLKmode;
13284 }
13285
13286 return mode;
13287 }
13288
13289 /* Return the size in bits of each element of vector type TYPE. */
13290
13291 unsigned int
13292 vector_element_bits (const_tree type)
13293 {
13294 gcc_checking_assert (VECTOR_TYPE_P (type));
13295 if (VECTOR_BOOLEAN_TYPE_P (type))
13296 return TYPE_PRECISION (TREE_TYPE (type));
13297 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13298 }
13299
13300 /* Calculate the size in bits of each element of vector type TYPE
13301 and return the result as a tree of type bitsizetype. */
13302
13303 tree
13304 vector_element_bits_tree (const_tree type)
13305 {
13306 gcc_checking_assert (VECTOR_TYPE_P (type));
13307 if (VECTOR_BOOLEAN_TYPE_P (type))
13308 return bitsize_int (vector_element_bits (type));
13309 return TYPE_SIZE (TREE_TYPE (type));
13310 }
13311
13312 /* Verify that basic properties of T match TV and thus T can be a variant of
13313 TV. TV should be the more specified variant (i.e. the main variant). */
13314
13315 static bool
13316 verify_type_variant (const_tree t, tree tv)
13317 {
13318 /* Type variant can differ by:
13319
13320 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13321 ENCODE_QUAL_ADDR_SPACE.
13322 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13323 in this case some values may not be set in the variant types
13324 (see TYPE_COMPLETE_P checks).
13325 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13326 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13327 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13328 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13329 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13330 this is necessary to make it possible to merge types form different TUs
13331 - arrays, pointers and references may have TREE_TYPE that is a variant
13332 of TREE_TYPE of their main variants.
13333 - aggregates may have new TYPE_FIELDS list that list variants of
13334 the main variant TYPE_FIELDS.
13335 - vector types may differ by TYPE_VECTOR_OPAQUE
13336 */
13337
13338 /* Convenience macro for matching individual fields. */
13339 #define verify_variant_match(flag) \
13340 do { \
13341 if (flag (tv) != flag (t)) \
13342 { \
13343 error ("type variant differs by %s", #flag); \
13344 debug_tree (tv); \
13345 return false; \
13346 } \
13347 } while (false)
13348
13349 /* tree_base checks. */
13350
13351 verify_variant_match (TREE_CODE);
13352 /* FIXME: Ada builds non-artificial variants of artificial types. */
13353 #if 0
13354 if (TYPE_ARTIFICIAL (tv))
13355 verify_variant_match (TYPE_ARTIFICIAL);
13356 #endif
13357 if (POINTER_TYPE_P (tv))
13358 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13359 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13360 verify_variant_match (TYPE_UNSIGNED);
13361 verify_variant_match (TYPE_PACKED);
13362 if (TREE_CODE (t) == REFERENCE_TYPE)
13363 verify_variant_match (TYPE_REF_IS_RVALUE);
13364 if (AGGREGATE_TYPE_P (t))
13365 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13366 else
13367 verify_variant_match (TYPE_SATURATING);
13368 /* FIXME: This check trigger during libstdc++ build. */
13369 #if 0
13370 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13371 verify_variant_match (TYPE_FINAL_P);
13372 #endif
13373
13374 /* tree_type_common checks. */
13375
13376 if (COMPLETE_TYPE_P (t))
13377 {
13378 verify_variant_match (TYPE_MODE);
13379 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13380 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13381 verify_variant_match (TYPE_SIZE);
13382 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13383 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13384 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13385 {
13386 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13387 TYPE_SIZE_UNIT (tv), 0));
13388 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13389 debug_tree (tv);
13390 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13391 debug_tree (TYPE_SIZE_UNIT (tv));
13392 error ("type%'s %<TYPE_SIZE_UNIT%>");
13393 debug_tree (TYPE_SIZE_UNIT (t));
13394 return false;
13395 }
13396 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13397 }
13398 verify_variant_match (TYPE_PRECISION);
13399 if (RECORD_OR_UNION_TYPE_P (t))
13400 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13401 else if (TREE_CODE (t) == ARRAY_TYPE)
13402 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13403 /* During LTO we merge variant lists from diferent translation units
13404 that may differ BY TYPE_CONTEXT that in turn may point
13405 to TRANSLATION_UNIT_DECL.
13406 Ada also builds variants of types with different TYPE_CONTEXT. */
13407 #if 0
13408 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13409 verify_variant_match (TYPE_CONTEXT);
13410 #endif
13411 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13412 verify_variant_match (TYPE_STRING_FLAG);
13413 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13414 verify_variant_match (TYPE_CXX_ODR_P);
13415 if (TYPE_ALIAS_SET_KNOWN_P (t))
13416 {
13417 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13418 debug_tree (tv);
13419 return false;
13420 }
13421
13422 /* tree_type_non_common checks. */
13423
13424 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13425 and dangle the pointer from time to time. */
13426 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13427 && (in_lto_p || !TYPE_VFIELD (tv)
13428 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13429 {
13430 error ("type variant has different %<TYPE_VFIELD%>");
13431 debug_tree (tv);
13432 return false;
13433 }
13434 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13435 || TREE_CODE (t) == INTEGER_TYPE
13436 || TREE_CODE (t) == BOOLEAN_TYPE
13437 || TREE_CODE (t) == REAL_TYPE
13438 || TREE_CODE (t) == FIXED_POINT_TYPE)
13439 {
13440 verify_variant_match (TYPE_MAX_VALUE);
13441 verify_variant_match (TYPE_MIN_VALUE);
13442 }
13443 if (TREE_CODE (t) == METHOD_TYPE)
13444 verify_variant_match (TYPE_METHOD_BASETYPE);
13445 if (TREE_CODE (t) == OFFSET_TYPE)
13446 verify_variant_match (TYPE_OFFSET_BASETYPE);
13447 if (TREE_CODE (t) == ARRAY_TYPE)
13448 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13449 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13450 or even type's main variant. This is needed to make bootstrap pass
13451 and the bug seems new in GCC 5.
13452 C++ FE should be updated to make this consistent and we should check
13453 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13454 is a match with main variant.
13455
13456 Also disable the check for Java for now because of parser hack that builds
13457 first an dummy BINFO and then sometimes replace it by real BINFO in some
13458 of the copies. */
13459 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13460 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13461 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13462 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13463 at LTO time only. */
13464 && (in_lto_p && odr_type_p (t)))
13465 {
13466 error ("type variant has different %<TYPE_BINFO%>");
13467 debug_tree (tv);
13468 error ("type variant%'s %<TYPE_BINFO%>");
13469 debug_tree (TYPE_BINFO (tv));
13470 error ("type%'s %<TYPE_BINFO%>");
13471 debug_tree (TYPE_BINFO (t));
13472 return false;
13473 }
13474
13475 /* Check various uses of TYPE_VALUES_RAW. */
13476 if (TREE_CODE (t) == ENUMERAL_TYPE
13477 && TYPE_VALUES (t))
13478 verify_variant_match (TYPE_VALUES);
13479 else if (TREE_CODE (t) == ARRAY_TYPE)
13480 verify_variant_match (TYPE_DOMAIN);
13481 /* Permit incomplete variants of complete type. While FEs may complete
13482 all variants, this does not happen for C++ templates in all cases. */
13483 else if (RECORD_OR_UNION_TYPE_P (t)
13484 && COMPLETE_TYPE_P (t)
13485 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13486 {
13487 tree f1, f2;
13488
13489 /* Fortran builds qualified variants as new records with items of
13490 qualified type. Verify that they looks same. */
13491 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13492 f1 && f2;
13493 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13494 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13495 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13496 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13497 /* FIXME: gfc_nonrestricted_type builds all types as variants
13498 with exception of pointer types. It deeply copies the type
13499 which means that we may end up with a variant type
13500 referring non-variant pointer. We may change it to
13501 produce types as variants, too, like
13502 objc_get_protocol_qualified_type does. */
13503 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13504 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13505 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13506 break;
13507 if (f1 || f2)
13508 {
13509 error ("type variant has different %<TYPE_FIELDS%>");
13510 debug_tree (tv);
13511 error ("first mismatch is field");
13512 debug_tree (f1);
13513 error ("and field");
13514 debug_tree (f2);
13515 return false;
13516 }
13517 }
13518 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13519 verify_variant_match (TYPE_ARG_TYPES);
13520 /* For C++ the qualified variant of array type is really an array type
13521 of qualified TREE_TYPE.
13522 objc builds variants of pointer where pointer to type is a variant, too
13523 in objc_get_protocol_qualified_type. */
13524 if (TREE_TYPE (t) != TREE_TYPE (tv)
13525 && ((TREE_CODE (t) != ARRAY_TYPE
13526 && !POINTER_TYPE_P (t))
13527 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13528 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13529 {
13530 error ("type variant has different %<TREE_TYPE%>");
13531 debug_tree (tv);
13532 error ("type variant%'s %<TREE_TYPE%>");
13533 debug_tree (TREE_TYPE (tv));
13534 error ("type%'s %<TREE_TYPE%>");
13535 debug_tree (TREE_TYPE (t));
13536 return false;
13537 }
13538 if (type_with_alias_set_p (t)
13539 && !gimple_canonical_types_compatible_p (t, tv, false))
13540 {
13541 error ("type is not compatible with its variant");
13542 debug_tree (tv);
13543 error ("type variant%'s %<TREE_TYPE%>");
13544 debug_tree (TREE_TYPE (tv));
13545 error ("type%'s %<TREE_TYPE%>");
13546 debug_tree (TREE_TYPE (t));
13547 return false;
13548 }
13549 return true;
13550 #undef verify_variant_match
13551 }
13552
13553
13554 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13555 the middle-end types_compatible_p function. It needs to avoid
13556 claiming types are different for types that should be treated
13557 the same with respect to TBAA. Canonical types are also used
13558 for IL consistency checks via the useless_type_conversion_p
13559 predicate which does not handle all type kinds itself but falls
13560 back to pointer-comparison of TYPE_CANONICAL for aggregates
13561 for example. */
13562
13563 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13564 type calculation because we need to allow inter-operability between signed
13565 and unsigned variants. */
13566
13567 bool
13568 type_with_interoperable_signedness (const_tree type)
13569 {
13570 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13571 signed char and unsigned char. Similarly fortran FE builds
13572 C_SIZE_T as signed type, while C defines it unsigned. */
13573
13574 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13575 == INTEGER_TYPE
13576 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13577 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13578 }
13579
/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.
   This function is used both by lto.cc canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   correspond to one defined by gimple_canonical_types_compatible_p.  */

bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used i.e. for warnings during
     declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));

  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* For types where we do ODR based TBAA the canonical type is always
     set correctly, so we know that types are different if their
     canonical types does not match.  */
  if (trust_type_canonical
      && (odr_type_p (t1) && odr_based_tbaa_p (t1))
	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* Fortran standard define C_PTR type that is compatible with every
	 C pointer.  For this reason we need to glob all pointers into one.
	 Still pointers in different address spaces are not compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  Two
		 PLACEHOLDER_EXPRs are considered matching without looking
		 deeper.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
	  && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
	      == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
	return true;
      else
	{
	  tree parms1, parms2;

	  /* Compare the argument lists element-wise.  */
	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  /* Different argument list lengths are incompatible.  */
	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;


	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields and zero-sized fields.  */
	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
			  || (DECL_SIZE (f1)
			      && integer_zerop (DECL_SIZE (f1)))))
	      f1 = TREE_CHAIN (f1);
	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
			  || (DECL_SIZE (f2)
			      && integer_zerop (DECL_SIZE (f2)))))
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
	 positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
13845
13846 /* For OPAQUE_TYPE T, it should have only size and alignment information
13847 and its mode should be of class MODE_OPAQUE. This function verifies
13848 these properties of T match TV which is the main variant of T and TC
13849 which is the canonical of T. */
13850
13851 static void
13852 verify_opaque_type (const_tree t, tree tv, tree tc)
13853 {
13854 gcc_assert (OPAQUE_TYPE_P (t));
13855 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13856 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13857
13858 /* For an opaque type T1, check if some of its properties match
13859 the corresponding ones of the other opaque type T2, emit some
13860 error messages for those inconsistent ones. */
13861 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13862 const char *kind_msg)
13863 {
13864 if (!OPAQUE_TYPE_P (t2))
13865 {
13866 error ("type %s is not an opaque type", kind_msg);
13867 debug_tree (t2);
13868 return;
13869 }
13870 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13871 {
13872 error ("type %s is not with opaque mode", kind_msg);
13873 debug_tree (t2);
13874 return;
13875 }
13876 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13877 {
13878 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13879 debug_tree (t2);
13880 return;
13881 }
13882 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13883 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13884 if (maybe_ne (t1_size, t2_size))
13885 {
13886 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13887 debug_tree (t2);
13888 return;
13889 }
13890 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13891 {
13892 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13893 debug_tree (t2);
13894 return;
13895 }
13896 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13897 {
13898 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13899 debug_tree (t2);
13900 return;
13901 }
13902 };
13903
13904 if (t != tv)
13905 check_properties_for_opaque_type (t, tv, "variant");
13906
13907 if (t != tc)
13908 check_properties_for_opaque_type (t, tc, "canonical");
13909 }
13910
/* Verify type T.

   Checks the internal consistency of T: that its main variant and
   canonical type links are well formed, that the overloaded "raw" value
   slots (TYPE_MIN_VALUE_RAW, TYPE_MAXVAL_RAW, TYPE_VALUES_RAW) hold the
   kind of node appropriate for T's tree code, and that cached small
   integer constants were not copied to an unrelated type.  On any
   failure the offending trees are dumped and the compiler aborts via
   internal_error.  */

void
verify_type (const_tree t)
{
  bool error_found = false;
  tree mv = TYPE_MAIN_VARIANT (t);
  tree ct = TYPE_CANONICAL (t);

  /* Opaque types have a dedicated, much smaller set of invariants.  */
  if (OPAQUE_TYPE_P (t))
    {
      verify_opaque_type (t, mv, ct);
      return;
    }

  if (!mv)
    {
      error ("main variant is not defined");
      error_found = true;
    }
  else if (mv != TYPE_MAIN_VARIANT (mv))
    {
      error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
      debug_tree (mv);
      error_found = true;
    }
  else if (t != mv && !verify_type_variant (t, mv))
    error_found = true;

  if (!ct)
    ;
  else if (TYPE_CANONICAL (ct) != ct)
    {
      error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
      debug_tree (ct);
      error_found = true;
    }
  /* Method and function types cannot be used to address memory and thus
     TYPE_CANONICAL really matters only for determining useless conversions.

     FIXME: C++ FE produce declarations of builtin functions that are not
     compatible with main variants.  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    ;
  else if (t != ct
	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
	      with variably sized arrays because their sizes possibly
	      gimplified to different variables.  */
	   && !variably_modified_type_p (ct, NULL)
	   && !gimple_canonical_types_compatible_p (t, ct, false)
	   && COMPLETE_TYPE_P (t))
    {
      error ("%<TYPE_CANONICAL%> is not compatible");
      debug_tree (ct);
      error_found = true;
    }

  if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
      && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
    {
      error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
      debug_tree (ct);
      error_found = true;
    }
  if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
    {
      error ("%<TYPE_CANONICAL%> of main variant is not main variant");
      debug_tree (ct);
      debug_tree (TYPE_MAIN_VARIANT (ct));
      error_found = true;
    }


  /* Check various uses of TYPE_MIN_VALUE_RAW.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
	 and dangle the pointer from time to time.  */
      if (TYPE_VFIELD (t)
	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
	{
	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
	  debug_tree (TYPE_VFIELD (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == POINTER_TYPE)
    {
      if (TYPE_NEXT_PTR_TO (t)
	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
	{
	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
	  debug_tree (TYPE_NEXT_PTR_TO (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == REFERENCE_TYPE)
    {
      if (TYPE_NEXT_REF_TO (t)
	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
	{
	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
	  debug_tree (TYPE_NEXT_REF_TO (t));
	  error_found = true;
	}
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
	   || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
	  useless_type_conversion_p (const_cast <tree> (t),
				     TREE_TYPE (TYPE_MIN_VALUE (t))
	 but does not for C sizetypes in LTO.  */
    }

  /* Check various uses of TYPE_MAXVAL_RAW.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (!TYPE_BINFO (t))
	;
      else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
	{
	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
	  debug_tree (TYPE_BINFO (t));
	  error_found = true;
	}
      else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
	{
	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    {
      if (TYPE_METHOD_BASETYPE (t)
	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
	{
	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
	  debug_tree (TYPE_METHOD_BASETYPE (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == OFFSET_TYPE)
    {
      if (TYPE_OFFSET_BASETYPE (t)
	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
	{
	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
	  debug_tree (TYPE_OFFSET_BASETYPE (t));
	  error_found = true;
	}
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
	   || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
	  useless_type_conversion_p (const_cast <tree> (t),
				     TREE_TYPE (TYPE_MAX_VALUE (t))
	 but does not for C sizetypes in LTO.  */
    }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_ARRAY_MAX_SIZE (t)
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
	{
	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
	  error_found = true;
	}
    }
  else if (TYPE_MAX_VALUE_RAW (t))
    {
      /* For the remaining tree codes the slot is expected to be unused.  */
      error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
      debug_tree (TYPE_MAX_VALUE_RAW (t));
      error_found = true;
    }

  if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
    {
      error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
      debug_tree (TYPE_LANG_SLOT_1 (t));
      error_found = true;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
      {
	tree value = TREE_VALUE (l);
	tree name = TREE_PURPOSE (l);

	/* C FE produce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
	   CONST_DECL of ENUMERAL TYPE.  */
	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
	  {
	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
	    && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
	  {
	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
		   "to the enum");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
	if (TREE_CODE (name) != IDENTIFIER_NODE)
	  {
	    error ("enum value name is not %<IDENTIFIER_NODE%>");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
      }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
	{
	  error ("array %<TYPE_DOMAIN%> is not integer type");
	  debug_tree (TYPE_DOMAIN (t));
	  error_found = true;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
	{
	  error ("%<TYPE_FIELDS%> defined in incomplete type");
	  error_found = true;
	}
      for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
	{
	  /* TODO: verify properties of decls.  */
	  if (TREE_CODE (fld) == FIELD_DECL)
	    ;
	  else if (TREE_CODE (fld) == TYPE_DECL)
	    ;
	  else if (TREE_CODE (fld) == CONST_DECL)
	    ;
	  else if (VAR_P (fld))
	    ;
	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
	    ;
	  else if (TREE_CODE (fld) == USING_DECL)
	    ;
	  else if (TREE_CODE (fld) == FUNCTION_DECL)
	    ;
	  else
	    {
	      error ("wrong tree in %<TYPE_FIELDS%> list");
	      debug_tree (fld);
	      error_found = true;
	    }
	}
    }
  else if (TREE_CODE (t) == INTEGER_TYPE
	   || TREE_CODE (t) == BOOLEAN_TYPE
	   || TREE_CODE (t) == OFFSET_TYPE
	   || TREE_CODE (t) == REFERENCE_TYPE
	   || TREE_CODE (t) == NULLPTR_TYPE
	   || TREE_CODE (t) == POINTER_TYPE)
    {
      if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
	{
	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
		 "is %p",
		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
	  error_found = true;
	}
      else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
	{
	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
	  debug_tree (TYPE_CACHED_VALUES (t));
	  error_found = true;
	}
      /* Verify just enough of cache to ensure that no one copied it to new type.
	 All copying should go by copy_node that should clear it.  */
      else if (TYPE_CACHED_VALUES_P (t))
	{
	  int i;
	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
	      {
		error ("wrong %<TYPE_CACHED_VALUES%> entry");
		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
		error_found = true;
		break;
	      }
	}
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
      {
	/* C++ FE uses TREE_PURPOSE to store initial values.  */
	if (TREE_PURPOSE (l) && in_lto_p)
	  {
	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
	    debug_tree (l);
	    error_found = true;
	  }
	if (!TYPE_P (TREE_VALUE (l)))
	  {
	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
	    debug_tree (l);
	    error_found = true;
	  }
      }
  else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
    {
      error ("%<TYPE_VALUES_RAW%> field is non-NULL");
      debug_tree (TYPE_VALUES_RAW (t));
      error_found = true;
    }
  /* The cached-values flag is only meaningful for the codes checked above;
     it must be clear everywhere else.  */
  if (TREE_CODE (t) != INTEGER_TYPE
      && TREE_CODE (t) != BOOLEAN_TYPE
      && TREE_CODE (t) != OFFSET_TYPE
      && TREE_CODE (t) != REFERENCE_TYPE
      && TREE_CODE (t) != NULLPTR_TYPE
      && TREE_CODE (t) != POINTER_TYPE
      && TYPE_CACHED_VALUES_P (t))
    {
      error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
      error_found = true;
    }

  /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
     of a type.  */
  if (TREE_CODE (t) == METHOD_TYPE
      && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
    {
      error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
      error_found = true;
    }

  if (error_found)
    {
      debug_tree (const_cast <tree> (t));
      internal_error ("%qs failed", __func__);
    }
}
14261
14262
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* Bound on the number of conversions we are willing to walk through
     below, to keep the cost of the query linear and small.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* For a constant, simply look at its sign bit within PREC.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Strip (nested) integral conversions to narrower-or-equal types,
     tracking the narrowest precision seen.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  value_range r;
  /* No usable global range for ARG: look through defining conversion
     statements in the SSA chain, as in the expression walk above.  */
  while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* Zero extension from a narrower type: known positive.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      /* Signed value: the range bounds, sign-extended to PREC, decide.  */
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
14341
14342
14343
14344
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  /* Only PARM_DECLs of pointer or OFFSET_TYPE type make sense here.  */
  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  /* A function type may carry several "nonnull" attributes; ATTRS is
     advanced to each occurrence in turn by lookup_attribute below.  */
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature
	 (attribute nonnull argument positions are 1-based).  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
14407
14408 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14409 information. */
14410
14411 location_t
14412 set_block (location_t loc, tree block)
14413 {
14414 location_t pure_loc = get_pure_location (loc);
14415 source_range src_range = get_range_from_loc (line_table, loc);
14416 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14417 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block, discriminator);
14418 }
14419
14420 location_t
14421 set_source_range (tree expr, location_t start, location_t finish)
14422 {
14423 source_range src_range;
14424 src_range.m_start = start;
14425 src_range.m_finish = finish;
14426 return set_source_range (expr, src_range);
14427 }
14428
14429 location_t
14430 set_source_range (tree expr, source_range src_range)
14431 {
14432 if (!EXPR_P (expr))
14433 return UNKNOWN_LOCATION;
14434
14435 location_t expr_location = EXPR_LOCATION (expr);
14436 location_t pure_loc = get_pure_location (expr_location);
14437 unsigned discriminator = get_discriminator_from_loc (expr_location);
14438 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14439 pure_loc,
14440 src_range,
14441 NULL,
14442 discriminator);
14443 SET_EXPR_LOCATION (expr, adhoc);
14444 return adhoc;
14445 }
14446
/* Return EXPR, potentially wrapped with a node expression LOC,
   if !CAN_HAVE_LOCATION_P (expr).

   NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
   VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.

   Wrapper nodes can be identified using location_wrapper_p.  */

tree
maybe_wrap_with_location (tree expr, location_t loc)
{
  if (expr == NULL)
    return NULL;
  /* No wrapper needed when there is no real location to record.  */
  if (loc == UNKNOWN_LOCATION)
    return expr;
  /* Nodes that can carry a location directly don't need a wrapper.  */
  if (CAN_HAVE_LOCATION_P (expr))
    return expr;
  /* We should only be adding wrappers for constants and for decls,
     or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
  gcc_assert (CONSTANT_CLASS_P (expr)
	      || DECL_P (expr)
	      || EXCEPTIONAL_CLASS_P (expr));

  /* For now, don't add wrappers to exceptional tree nodes, to minimize
     any impact of the wrapper nodes.  */
  if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
    return expr;

  /* Compiler-generated temporary variables don't need a wrapper.  */
  if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
    return expr;

  /* If any auto_suppress_location_wrappers are active, don't create
     wrappers.  */
  if (suppress_location_wrappers > 0)
    return expr;

  /* Constants other than STRING_CST, and CONST_DECLs without TREE_STATIC,
     get NON_LVALUE_EXPR; everything else gets VIEW_CONVERT_EXPR.  */
  tree_code code
    = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
       ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
  tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
  /* Mark this node as being a wrapper.  */
  EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
  return wrapper;
}
14493
/* When > 0, maybe_wrap_with_location refrains from creating wrapper
   nodes (see the check above); presumably managed by
   auto_suppress_location_wrappers — see tree.h.  */
int suppress_location_wrappers;
14495
14496 /* Return the name of combined function FN, for debugging purposes. */
14497
14498 const char *
14499 combined_fn_name (combined_fn fn)
14500 {
14501 if (builtin_fn_p (fn))
14502 {
14503 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14504 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14505 }
14506 else
14507 return internal_fn_name (as_internal_fn (fn));
14508 }
14509
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's argument are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      /* Advance ATTRS to the next "nonnull" occurrence.  */
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  /* Attribute positions are 1-based; the bitmap is zero-based.  */
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
14566
14567 /* Returns true if TYPE is a type where it and all of its subobjects
14568 (recursively) are of structure, union, or array type. */
14569
14570 bool
14571 is_empty_type (const_tree type)
14572 {
14573 if (RECORD_OR_UNION_TYPE_P (type))
14574 {
14575 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14576 if (TREE_CODE (field) == FIELD_DECL
14577 && !DECL_PADDING_P (field)
14578 && !is_empty_type (TREE_TYPE (field)))
14579 return false;
14580 return true;
14581 }
14582 else if (TREE_CODE (type) == ARRAY_TYPE)
14583 return (integer_minus_onep (array_type_nelts (type))
14584 || TYPE_DOMAIN (type) == NULL_TREE
14585 || is_empty_type (TREE_TYPE (type)));
14586 return false;
14587 }
14588
14589 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14590 that shouldn't be passed via stack. */
14591
14592 bool
14593 default_is_empty_record (const_tree type)
14594 {
14595 if (!abi_version_at_least (12))
14596 return false;
14597
14598 if (type == error_mark_node)
14599 return false;
14600
14601 if (TREE_ADDRESSABLE (type))
14602 return false;
14603
14604 return is_empty_type (TYPE_MAIN_VARIANT (type));
14605 }
14606
14607 /* Determine whether TYPE is a structure with a flexible array member,
14608 or a union containing such a structure (possibly recursively). */
14609
14610 bool
14611 flexible_array_type_p (const_tree type)
14612 {
14613 tree x, last;
14614 switch (TREE_CODE (type))
14615 {
14616 case RECORD_TYPE:
14617 last = NULL_TREE;
14618 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14619 if (TREE_CODE (x) == FIELD_DECL)
14620 last = x;
14621 if (last == NULL_TREE)
14622 return false;
14623 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14624 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14625 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14626 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14627 return true;
14628 return false;
14629 case UNION_TYPE:
14630 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14631 {
14632 if (TREE_CODE (x) == FIELD_DECL
14633 && flexible_array_type_p (TREE_TYPE (x)))
14634 return true;
14635 }
14636 return false;
14637 default:
14638 return false;
14639 }
14640 }
14641
14642 /* Like int_size_in_bytes, but handle empty records specially. */
14643
14644 HOST_WIDE_INT
14645 arg_int_size_in_bytes (const_tree type)
14646 {
14647 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14648 }
14649
14650 /* Like size_in_bytes, but handle empty records specially. */
14651
14652 tree
14653 arg_size_in_bytes (const_tree type)
14654 {
14655 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14656 }
14657
/* Return true if an expression with CODE has to have the same result type as
   its first operand.  */

bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    /* Unary arithmetic.  */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:

    /* Binary arithmetic where both operands share the result type.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:

    /* Shifts and rotates: the second operand's type may differ, but the
       result type matches the first operand.  */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      return false;
    }
}
14701
14702 /* Return a typenode for the "standard" C type with a given name. */
14703 tree
14704 get_typenode_from_name (const char *name)
14705 {
14706 if (name == NULL || *name == '\0')
14707 return NULL_TREE;
14708
14709 if (strcmp (name, "char") == 0)
14710 return char_type_node;
14711 if (strcmp (name, "unsigned char") == 0)
14712 return unsigned_char_type_node;
14713 if (strcmp (name, "signed char") == 0)
14714 return signed_char_type_node;
14715
14716 if (strcmp (name, "short int") == 0)
14717 return short_integer_type_node;
14718 if (strcmp (name, "short unsigned int") == 0)
14719 return short_unsigned_type_node;
14720
14721 if (strcmp (name, "int") == 0)
14722 return integer_type_node;
14723 if (strcmp (name, "unsigned int") == 0)
14724 return unsigned_type_node;
14725
14726 if (strcmp (name, "long int") == 0)
14727 return long_integer_type_node;
14728 if (strcmp (name, "long unsigned int") == 0)
14729 return long_unsigned_type_node;
14730
14731 if (strcmp (name, "long long int") == 0)
14732 return long_long_integer_type_node;
14733 if (strcmp (name, "long long unsigned int") == 0)
14734 return long_long_unsigned_type_node;
14735
14736 gcc_unreachable ();
14737 }
14738
/* List of pointer types used to declare builtins before we have seen their
   real declaration.  Each entry is { pointer type node, base pointer type
   used as a fallback, struct tag name }; "fenv_t" and "fexcept_t" appear
   twice for their const and non-const pointer variants.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
14752
14753 /* Return the maximum object size. */
14754
14755 tree
14756 max_object_size (void)
14757 {
14758 /* To do: Make this a configurable parameter. */
14759 return TYPE_MAX_VALUE (ptrdiff_type_node);
14760 }
14761
14762 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14763 parameter default to false and that weeds out error_mark_node. */
14764
14765 bool
14766 verify_type_context (location_t loc, type_context_kind context,
14767 const_tree type, bool silent_p)
14768 {
14769 if (type == error_mark_node)
14770 return true;
14771
14772 gcc_assert (TYPE_P (type));
14773 return (!targetm.verify_type_context
14774 || targetm.verify_type_context (loc, context, type, silent_p));
14775 }
14776
/* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators.  Return false if they may or may not name such
   a pair and, when nonnull, set *PCERTAIN to true if they certainly
   do not.  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  if (new_len < 5 || delete_len < 6)
    return false;
  /* Strip up to two leading underscores from each mangled name.  */
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  /* operator delete takes a void* first argument ("Pv").  */
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
14867
/* Return the zero-based number corresponding to the argument being
   deallocated if FNDECL is a deallocation function or an out-of-bounds
   value if it isn't.  */

unsigned
fndecl_dealloc_argno (tree fndecl)
{
  /* A call to operator delete isn't recognized as one to a built-in.  */
  if (DECL_IS_OPERATOR_DELETE_P (fndecl))
    {
      if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
	return 0;

      /* Avoid placement delete that's not been inlined.  */
      tree fname = DECL_ASSEMBLER_NAME (fndecl);
      if (id_equal (fname, "_ZdlPvS_")	    // ordinary form
	  || id_equal (fname, "_ZdaPvS_"))  // array form
	return UINT_MAX;
      return 0;
    }

  /* TODO: Handle user-defined functions with attribute malloc?  Handle
     known non-built-ins like fopen?  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      /* free and realloc deallocate their first argument; other
	 built-ins are not deallocators.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_FREE:
	case BUILT_IN_REALLOC:
	  return 0;
	default:
	  break;
	}
      return UINT_MAX;
    }

  tree attrs = DECL_ATTRIBUTES (fndecl);
  if (!attrs)
    return UINT_MAX;

  /* Scan all "*dealloc" attributes; the first one with a position
     argument determines the result.  */
  for (tree atfree = attrs;
       (atfree = lookup_attribute ("*dealloc", atfree));
       atfree = TREE_CHAIN (atfree))
    {
      tree alloc = TREE_VALUE (atfree);
      if (!alloc)
	continue;

      tree pos = TREE_CHAIN (alloc);
      if (!pos)
	return 0;

      /* Attribute positions are 1-based; the result is zero-based.  */
      pos = TREE_VALUE (pos);
      return TREE_INT_CST_LOW (pos) - 1;
    }

  return UINT_MAX;
}
14926
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF
   to the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* Look through a defining assignment that takes an address or
	 reads a component/variable.  */
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
14980
14981 /* Return length of attribute names string,
14982 if arglist chain > 1, -1 otherwise. */
14983
14984 int
14985 get_target_clone_attr_len (tree arglist)
14986 {
14987 tree arg;
14988 int str_len_sum = 0;
14989 int argnum = 0;
14990
14991 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
14992 {
14993 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
14994 size_t len = strlen (str);
14995 str_len_sum += len + 1;
14996 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
14997 argnum++;
14998 argnum++;
14999 }
15000 if (argnum <= 1)
15001 return -1;
15002 return str_len_sum;
15003 }
15004
/* Release tree.cc's private caches.  */

void
tree_cc_finalize (void)
{
  clear_nonstandard_integer_type_cache ();
}
15010
15011 #if CHECKING_P
15012
15013 namespace selftest {
15014
15015 /* Selftests for tree. */
15016
15017 /* Verify that integer constants are sane. */
15018
15019 static void
15020 test_integer_constants ()
15021 {
15022 ASSERT_TRUE (integer_type_node != NULL);
15023 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15024
15025 tree type = integer_type_node;
15026
15027 tree zero = build_zero_cst (type);
15028 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15029 ASSERT_EQ (type, TREE_TYPE (zero));
15030
15031 tree one = build_int_cst (type, 1);
15032 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15033 ASSERT_EQ (type, TREE_TYPE (zero));
15034 }
15035
15036 /* Verify identifiers. */
15037
15038 static void
15039 test_identifiers ()
15040 {
15041 tree identifier = get_identifier ("foo");
15042 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15043 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15044 }
15045
15046 /* Verify LABEL_DECL. */
15047
15048 static void
15049 test_labels ()
15050 {
15051 tree identifier = get_identifier ("err");
15052 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15053 identifier, void_type_node);
15054 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15055 ASSERT_FALSE (FORCED_LABEL (label_decl));
15056 }
15057
15058 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15059 are given by VALS. */
15060
15061 static tree
15062 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15063 {
15064 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15065 tree_vector_builder builder (type, vals.length (), 1);
15066 builder.splice (vals);
15067 return builder.build ();
15068 }
15069
15070 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15071
15072 static void
15073 check_vector_cst (const vec<tree> &expected, tree actual)
15074 {
15075 ASSERT_KNOWN_EQ (expected.length (),
15076 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15077 for (unsigned int i = 0; i < expected.length (); ++i)
15078 ASSERT_EQ (wi::to_wide (expected[i]),
15079 wi::to_wide (vector_cst_elt (actual, i)));
15080 }
15081
/* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
			    unsigned int npatterns)
{
  /* A duplicate encoding stores one element per pattern.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15096
/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  */

static void
check_vector_cst_fill (const vec<tree> &expected, tree actual,
		       unsigned int npatterns)
{
  /* A fill encoding stores two elements per pattern (foreground and
     background), and is neither duplicated nor stepped.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15112
/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_stepped (const vec<tree> &expected, tree actual,
			  unsigned int npatterns)
{
  /* A stepped encoding stores three elements per pattern.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15127
/* Test the creation of VECTOR_CSTs.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
15211
15212 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15213 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15214 modifying its argument in-place. */
15215
15216 static void
15217 check_strip_nops (tree node, tree expected)
15218 {
15219 STRIP_NOPS (node);
15220 ASSERT_EQ (expected, node);
15221 }
15222
/* Verify location wrappers.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* NULL input passes straight through.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST: uses VIEW_CONVERT_EXPR rather than
     NON_LVALUE_EXPR.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
15285
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  Each predicate is checked against the same
   fixed menagerie of constants: integer 0/1/-1, float 0/1/-1,
   location-wrapped versions of those six, and complex integer/float
   constants whose real parts are 0/1/-1 (imaginary part zero).  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  /* Integer constants 0, 1, -1, each with a location-wrapped twin
     (wr_*).  The wrapped and unwrapped forms must always give the
     same predicate results.  */
  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  /* REAL_CST constants 0.0, 1.0, -1.0, plus wrapped twins.  */
  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex integer constants 0+0i, 1+0i, -1+0i.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  /* Complex float constants 0.0+0.0i, 1.0+0.0i, -1.0+0.0i.  */
  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  Note it accepts complex 1+0i as "one".  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  Complex integer 0+0i counts as zero;
     float zeros (plain or complex) do not.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  Note: unlike integer_minus_onep below,
     complex -1+0i is NOT accepted as "all ones".  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  Complex integer -1+0i IS accepted.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  Note: unlike integer_onep, complex 1+0i
     is NOT accepted (the imaginary part is 0, not 1).  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  Complex float 0.0+0.0i counts as real zero.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  Accepts integer, real, and complex zeros alike.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  Complex values are never
     considered nonnegative.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
15602
15603 /* Check that string escaping works correctly. */
15604
15605 static void
15606 test_escaped_strings (void)
15607 {
15608 int saved_cutoff;
15609 escaped_string msg;
15610
15611 msg.escape (NULL);
15612 /* ASSERT_STREQ does not accept NULL as a valid test
15613 result, so we have to use ASSERT_EQ instead. */
15614 ASSERT_EQ (NULL, (const char *) msg);
15615
15616 msg.escape ("");
15617 ASSERT_STREQ ("", (const char *) msg);
15618
15619 msg.escape ("foobar");
15620 ASSERT_STREQ ("foobar", (const char *) msg);
15621
15622 /* Ensure that we have -fmessage-length set to 0. */
15623 saved_cutoff = pp_line_cutoff (global_dc->printer);
15624 pp_line_cutoff (global_dc->printer) = 0;
15625
15626 msg.escape ("foo\nbar");
15627 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15628
15629 msg.escape ("\a\b\f\n\r\t\v");
15630 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15631
15632 /* Now repeat the tests with -fmessage-length set to 5. */
15633 pp_line_cutoff (global_dc->printer) = 5;
15634
15635 /* Note that the newline is not translated into an escape. */
15636 msg.escape ("foo\nbar");
15637 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15638
15639 msg.escape ("\a\b\f\n\r\t\v");
15640 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15641
15642 /* Restore the original message length setting. */
15643 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15644 }
15645
/* Run all of the selftests within this file.  Keep the existing run
   order; whether the individual tests are order-independent has not
   been verified here.  */

void
tree_cc_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15659
15660 } // namespace selftest
15661
15662 #endif /* CHECKING_P */
15663
15664 #include "gt-tree.h"