/* Source: gcc/ada/gcc-interface/utils.c, from the thirdparty/gcc.git mirror
   at git.ipfire.org (git-blame view; per-line commit annotations follow).  */
27becfc8 1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
62e88f41 9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
27becfc8 10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
27becfc8 26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
b20a8bb4 30#include "vec.h"
b20a8bb4 31#include "alias.h"
27becfc8 32#include "tree.h"
9ef16211 33#include "inchash.h"
b20a8bb4 34#include "fold-const.h"
9ed99284 35#include "stringpool.h"
36#include "stor-layout.h"
37#include "attribs.h"
38#include "varasm.h"
27becfc8 39#include "flags.h"
27becfc8 40#include "toplev.h"
0b205f4c 41#include "diagnostic-core.h"
27becfc8 42#include "output.h"
43#include "ggc.h"
44#include "debug.h"
45#include "convert.h"
46#include "target.h"
218e3e4e 47#include "common/common-target.h"
a9a42d49 48#include "langhooks.h"
1140c305 49#include "hash-map.h"
1140c305 50#include "hard-reg-set.h"
1140c305 51#include "function.h"
27becfc8 52#include "cgraph.h"
60388043 53#include "diagnostic.h"
b9ed1410 54#include "timevar.h"
a9a42d49 55#include "tree-dump.h"
27becfc8 56#include "tree-inline.h"
57#include "tree-iterator.h"
27becfc8 58
59#include "ada.h"
60#include "types.h"
61#include "atree.h"
62#include "elists.h"
63#include "namet.h"
64#include "nlists.h"
65#include "stringt.h"
66#include "uintp.h"
67#include "fe.h"
68#include "sinfo.h"
69#include "einfo.h"
70#include "ada-tree.h"
71#include "gigi.h"
72
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetics may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  /* Sentinel terminating the table.  */
  { NULL,         0, 0,  false, false, false, NULL, false }
};
160
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the association table above; entities are indexed relative
   to First_Node_Id.  */
#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

/* Accessors for the dummy node table, with the same indexing scheme.  */
#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);
  static int keep_cache_entry (pad_type_hash *&);
};

static GTY ((cache))
  hash_table<pad_type_hasher> *pad_type_hash_table;

/* Forward declarations of local subprograms defined later in this file.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* A linked list used as a queue to defer the initialization of the
   DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
   of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  tree decl;		    /* The ..._DECL node to work on.  */
  Entity_Id gnat_scope;     /* The corresponding entity's Scope attribute.  */
  int force_global;	    /* force_global value when pushing DECL.  */
  vec<tree, va_heap, vl_ptr> types;  /* A list of ..._TYPE nodes to propagate the
					context to.  */
  struct deferred_decl_context_node *next;  /* The next queue item.  */
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);
27becfc8 280\f
76cb9822 281/* Initialize data structures of the utils.c module. */
27becfc8 282
283void
76cb9822 284init_gnat_utils (void)
27becfc8 285{
76cb9822 286 /* Initialize the association of GNAT nodes to GCC trees. */
25a27413 287 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
76cb9822 288
289 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
25a27413 290 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
76cb9822 291
292 /* Initialize the hash table of padded types. */
d1023d12 293 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
27becfc8 294}
295
76cb9822 296/* Destroy data structures of the utils.c module. */
5154a747 297
298void
76cb9822 299destroy_gnat_utils (void)
5154a747 300{
76cb9822 301 /* Destroy the association of GNAT nodes to GCC trees. */
5154a747 302 ggc_free (associate_gnat_to_gnu);
303 associate_gnat_to_gnu = NULL;
5154a747 304
76cb9822 305 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
306 ggc_free (dummy_node_table);
307 dummy_node_table = NULL;
308
309 /* Destroy the hash table of padded types. */
d1023d12 310 pad_type_hash_table->empty ();
76cb9822 311 pad_type_hash_table = NULL;
76cb9822 312}
313\f
bff37289 314/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
315 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
316 If NO_CHECK is true, the latter check is suppressed.
27becfc8 317
bff37289 318 If GNU_DECL is zero, reset a previous association. */
27becfc8 319
320void
321save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
322{
323 /* Check that GNAT_ENTITY is not already defined and that it is being set
bff37289 324 to something which is a decl. If that is not the case, this usually
27becfc8 325 means GNAT_ENTITY is defined twice, but occasionally is due to some
326 Gigi problem. */
327 gcc_assert (!(gnu_decl
328 && (PRESENT_GNU_TREE (gnat_entity)
329 || (!no_check && !DECL_P (gnu_decl)))));
330
331 SET_GNU_TREE (gnat_entity, gnu_decl);
332}
333
bff37289 334/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
335 that was associated with it. If there is no such tree node, abort.
27becfc8 336
337 In some cases, such as delayed elaboration or expressions that need to
338 be elaborated only once, GNAT_ENTITY is really not an entity. */
339
340tree
341get_gnu_tree (Entity_Id gnat_entity)
342{
343 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
344 return GET_GNU_TREE (gnat_entity);
345}
346
347/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
348
349bool
350present_gnu_tree (Entity_Id gnat_entity)
351{
352 return PRESENT_GNU_TREE (gnat_entity);
353}
354\f
27becfc8 355/* Make a dummy type corresponding to GNAT_TYPE. */
356
357tree
358make_dummy_type (Entity_Id gnat_type)
359{
bcde54d5 360 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
27becfc8 361 tree gnu_type;
362
27becfc8 363 /* If there was no equivalent type (can only happen when just annotating
364 types) or underlying type, go back to the original type. */
bcde54d5 365 if (No (gnat_equiv))
366 gnat_equiv = gnat_type;
27becfc8 367
368 /* If it there already a dummy type, use that one. Else make one. */
bcde54d5 369 if (PRESENT_DUMMY_NODE (gnat_equiv))
370 return GET_DUMMY_NODE (gnat_equiv);
27becfc8 371
372 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
373 an ENUMERAL_TYPE. */
bcde54d5 374 gnu_type = make_node (Is_Record_Type (gnat_equiv)
375 ? tree_code_for_record_type (gnat_equiv)
27becfc8 376 : ENUMERAL_TYPE);
377 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
378 TYPE_DUMMY_P (gnu_type) = 1;
515c6c6c 379 TYPE_STUB_DECL (gnu_type)
380 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
bcde54d5 381 if (Is_By_Reference_Type (gnat_equiv))
a3b35344 382 TYPE_BY_REFERENCE_P (gnu_type) = 1;
27becfc8 383
bcde54d5 384 SET_DUMMY_NODE (gnat_equiv, gnu_type);
27becfc8 385
386 return gnu_type;
387}
41dd28aa 388
389/* Return the dummy type that was made for GNAT_TYPE, if any. */
390
391tree
392get_dummy_type (Entity_Id gnat_type)
393{
394 return GET_DUMMY_NODE (gnat_type);
395}
396
397/* Build dummy fat and thin pointer types whose designated type is specified
398 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
399
400void
401build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
402{
403 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
404 tree gnu_fat_type, fields, gnu_object_type;
405
406 gnu_template_type = make_node (RECORD_TYPE);
407 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
408 TYPE_DUMMY_P (gnu_template_type) = 1;
409 gnu_ptr_template = build_pointer_type (gnu_template_type);
410
411 gnu_array_type = make_node (ENUMERAL_TYPE);
412 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
413 TYPE_DUMMY_P (gnu_array_type) = 1;
414 gnu_ptr_array = build_pointer_type (gnu_array_type);
415
416 gnu_fat_type = make_node (RECORD_TYPE);
417 /* Build a stub DECL to trigger the special processing for fat pointer types
418 in gnat_pushdecl. */
419 TYPE_NAME (gnu_fat_type)
420 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
421 gnu_fat_type);
422 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
423 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
424 DECL_CHAIN (fields)
425 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
426 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
427 finish_fat_pointer_type (gnu_fat_type, fields);
428 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
429 /* Suppress debug info until after the type is completed. */
430 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
431
432 gnu_object_type = make_node (RECORD_TYPE);
433 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
434 TYPE_DUMMY_P (gnu_object_type) = 1;
435
436 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
437 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
438}
27becfc8 439\f
1d2bb655 440/* Return true if we are in the global binding level. */
27becfc8 441
1d2bb655 442bool
27becfc8 443global_bindings_p (void)
444{
1d2bb655 445 return force_global || current_function_decl == NULL_TREE;
27becfc8 446}
447
db72a63f 448/* Enter a new binding level. */
27becfc8 449
450void
c88e6a4f 451gnat_pushlevel (void)
27becfc8 452{
453 struct gnat_binding_level *newlevel = NULL;
454
455 /* Reuse a struct for this binding level, if there is one. */
456 if (free_binding_level)
457 {
458 newlevel = free_binding_level;
459 free_binding_level = free_binding_level->chain;
460 }
461 else
25a27413 462 newlevel = ggc_alloc<gnat_binding_level> ();
27becfc8 463
464 /* Use a free BLOCK, if any; otherwise, allocate one. */
465 if (free_block_chain)
466 {
467 newlevel->block = free_block_chain;
468 free_block_chain = BLOCK_CHAIN (free_block_chain);
469 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
470 }
471 else
472 newlevel->block = make_node (BLOCK);
473
474 /* Point the BLOCK we just made to its parent. */
475 if (current_binding_level)
476 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
477
db72a63f 478 BLOCK_VARS (newlevel->block) = NULL_TREE;
479 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
27becfc8 480 TREE_USED (newlevel->block) = 1;
481
db72a63f 482 /* Add this level to the front of the chain (stack) of active levels. */
27becfc8 483 newlevel->chain = current_binding_level;
484 newlevel->jmpbuf_decl = NULL_TREE;
485 current_binding_level = newlevel;
486}
487
488/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
489 and point FNDECL to this BLOCK. */
490
491void
492set_current_block_context (tree fndecl)
493{
494 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
495 DECL_INITIAL (fndecl) = current_binding_level->block;
db72a63f 496 set_block_for_group (current_binding_level->block);
27becfc8 497}
498
499/* Set the jmpbuf_decl for the current binding level to DECL. */
500
501void
502set_block_jmpbuf_decl (tree decl)
503{
504 current_binding_level->jmpbuf_decl = decl;
505}
506
507/* Get the jmpbuf_decl, if any, for the current binding level. */
508
509tree
c88e6a4f 510get_block_jmpbuf_decl (void)
27becfc8 511{
512 return current_binding_level->jmpbuf_decl;
513}
514
db72a63f 515/* Exit a binding level. Set any BLOCK into the current code group. */
27becfc8 516
517void
c88e6a4f 518gnat_poplevel (void)
27becfc8 519{
520 struct gnat_binding_level *level = current_binding_level;
521 tree block = level->block;
522
523 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
43f0f599 524 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
27becfc8 525
526 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
527 are no variables free the block and merge its subblocks into those of its
db72a63f 528 parent block. Otherwise, add it to the list of its parent. */
27becfc8 529 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
530 ;
531 else if (BLOCK_VARS (block) == NULL_TREE)
532 {
533 BLOCK_SUBBLOCKS (level->chain->block)
2149d019 534 = block_chainon (BLOCK_SUBBLOCKS (block),
535 BLOCK_SUBBLOCKS (level->chain->block));
27becfc8 536 BLOCK_CHAIN (block) = free_block_chain;
537 free_block_chain = block;
538 }
539 else
540 {
541 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
542 BLOCK_SUBBLOCKS (level->chain->block) = block;
543 TREE_USED (block) = 1;
544 set_block_for_group (block);
545 }
546
547 /* Free this binding structure. */
548 current_binding_level = level->chain;
549 level->chain = free_binding_level;
550 free_binding_level = level;
551}
552
24ffee3d 553/* Exit a binding level and discard the associated BLOCK. */
554
555void
556gnat_zaplevel (void)
557{
558 struct gnat_binding_level *level = current_binding_level;
559 tree block = level->block;
560
561 BLOCK_CHAIN (block) = free_block_chain;
562 free_block_chain = block;
563
564 /* Free this binding structure. */
565 current_binding_level = level->chain;
566 level->chain = free_binding_level;
567 free_binding_level = level;
568}
27becfc8 569\f
1d9cffb8 570/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
571
572static void
573gnat_set_type_context (tree type, tree context)
574{
575 tree decl = TYPE_STUB_DECL (type);
576
577 TYPE_CONTEXT (type) = context;
578
579 while (decl && DECL_PARALLEL_TYPE (decl))
580 {
ba502e2b 581 tree parallel_type = DECL_PARALLEL_TYPE (decl);
582
583 /* Give a context to the parallel types and their stub decl, if any.
584 Some parallel types seems to be present in multiple parallel type
585 chains, so don't mess with their context if they already have one. */
586 if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
587 {
588 if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
589 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
590 TYPE_CONTEXT (parallel_type) = context;
591 }
592
1d9cffb8 593 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
594 }
595}
596
810e94f8 597/* Return the innermost scope, starting at GNAT_NODE, we are be interested in
598 the debug info, or Empty if there is no such scope. If not NULL, set
599 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
600
601static Entity_Id
602get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
603{
604 Entity_Id gnat_entity;
605
606 if (is_subprogram)
607 *is_subprogram = false;
608
609 if (Nkind (gnat_node) == N_Defining_Identifier)
610 gnat_entity = Scope (gnat_node);
611 else
612 return Empty;
613
614 while (Present (gnat_entity))
615 {
616 switch (Ekind (gnat_entity))
617 {
618 case E_Function:
619 case E_Procedure:
620 if (Present (Protected_Body_Subprogram (gnat_entity)))
621 gnat_entity = Protected_Body_Subprogram (gnat_entity);
622
623 /* If the scope is a subprogram, then just rely on
624 current_function_decl, so that we don't have to defer
625 anything. This is needed because other places rely on the
626 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
627 if (is_subprogram)
628 *is_subprogram = true;
629 return gnat_entity;
630
631 case E_Record_Type:
632 case E_Record_Subtype:
633 return gnat_entity;
634
635 default:
636 /* By default, we are not interested in this particular scope: go to
637 the outer one. */
638 break;
639 }
640 gnat_entity = Scope (gnat_entity);
641 }
642 return Empty;
643}
644
645/* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing of
646 N otherwise. */
647
648static void
649defer_or_set_type_context (tree type,
650 tree context,
651 struct deferred_decl_context_node *n)
652{
653 if (n)
654 add_deferred_type_context (n, type);
655 else
656 gnat_set_type_context (type, context);
657}
658
659/* Return global_context. Create it if needed, first. */
660
661static tree
662get_global_context (void)
663{
664 if (!global_context)
f2023823 665 {
666 global_context = build_translation_unit_decl (NULL_TREE);
667 debug_hooks->register_main_translation_unit (global_context);
668 }
810e94f8 669 return global_context;
670}
671
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fallback to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl != NULL_TREE && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help determining the context, use the
     global scope.  */
  if (!deferred_decl_context && context == NULL_TREE)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context != NULL_TREE
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context) != NULL_TREE))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  /* Suppress warnings for declarations the user asked no warnings for.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  /* Remember builtin functions so they can be looked up by name.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt;
	  /* ??? Copy and original type are not supposed to be variant but we
	     really need a variant for the placeholder machinery to work.  */
	  if (TYPE_IS_FAT_POINTER_P (t))
	    tt = build_variant_type_copy (t);
	  else
	    {
	      /* TYPE_NEXT_PTR_TO is a chain of main variants.  */
	      tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
	      tt = build_qualified_type (tt, TYPE_QUALS (t));
	    }
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  /* Clear T so the variant-propagation loop below is skipped.  */
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}
845\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.

   The returned record has a single field "F" whose position is a run-time
   expression based on the record's own address, so the record necessarily
   has BLKmode and a size that over-allocates by up to ALIGN bits.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  /* The record's own address, as a sizetype value.  This is a placeholder
     expression, substituted at the point of use.  */
  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		    <--------- pos ---------->
		{  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
		{  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  /* Name the record after the aligned type, with an "ALIGN" suffix, so the
     GNAT encoding can be recognized by the debugger.  */
  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* Sizes account for the worst case: ROOM bytes up front plus up to ALIGN
     bits of dynamic offset before the field proper.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  /* The field position is not a compile-time constant, so the record cannot
     have an integral mode.  */
  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
938
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.

   TYPE is assumed to have a constant size (tree_to_uhwi below would abort
   otherwise) -- callers must check this.  The function recurses on the
   field types, so a whole nest of records may be rewritten.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      /* Round the size up to the next power of 2 so layout can pick an
	 integral mode of that width.  */
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
	return type;

      /* NEW_SIZE & -NEW_SIZE isolates its lowest set bit, i.e. the largest
	 power of 2 that divides the new size.  */
      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      /* Recursively repack aggregate fields with constant size.  */
      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (old_field);

      new_field
	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
			     new_size, bit_position (old_field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      /* Fields are prepended here, then the list is reversed below.  */
      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}
1079
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.

   For integral types this builds a fresh signed/unsigned type of the given
   precision and copies the RM bounds; for fat/thin pointers it converts
   between the two representations when SIZE demands it.  On any input it
   cannot handle, the original TYPE is returned unchanged.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      /* A biased representation is always unsigned (it stores the offset
	 from the lower bound).  */
      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      /* Keep the RM (Reference Manual) bounds of the original type; only
	 the underlying precision changes.  */
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  /* Fall back to the default pointer mode if the target cannot
	     use a pointer of the requested width.  */
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
1162
1163/* See if the data pointed to by the hash table slot is marked. */
1164
99378011 1165int
1166pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
76cb9822 1167{
99378011 1168 return ggc_marked_p (t->type);
76cb9822 1169}
1170
d1023d12 1171/* Return true iff the padded types are equivalent. */
76cb9822 1172
d1023d12 1173bool
1174pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
76cb9822 1175{
76cb9822 1176 tree type1, type2;
1177
1178 if (t1->hash != t2->hash)
1179 return 0;
1180
1181 type1 = t1->type;
1182 type2 = t2->type;
1183
1184 /* We consider that the padded types are equivalent if they pad the same
1185 type and have the same size, alignment and RM size. Taking the mode
1186 into account is redundant since it is determined by the others. */
1187 return
1188 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1189 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1190 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1191 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1192}
1193
/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.

   Returns NULL_TREE when TYPE was inserted as the new canonical version.
   The hash must combine exactly the components that pad_type_hasher::equal
   compares: inner type, size, alignment and RM size, in that order.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  /* Hash the inner (padded) type first, then mix in size, alignment and
     RM size -- mirroring pad_type_hasher::equal.  */
  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  /* Probe with a stack-allocated key; only allocate on insertion.  */
  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}
1221
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.

   A SIZE of NULL_TREE or an ALIGN of 0 means "keep the original value".
   The result is either TYPE itself (when no padding is needed) or a
   RECORD_TYPE with TYPE_PADDING_P set containing a single field "F" of
   TYPE, possibly canonicalized through the pad-type hash table.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      /* Strip the padding: work on the inner type from here on.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  /* An equivalent pad type already exists: reuse it and skip the
	     debug-info machinery below, which already ran for it.  */
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* Build the "XVS" parallel record pointing back at the padded type,
	 for the debugger's benefit (GNAT encoding).  */
      tree marker = make_node (RECORD_TYPE);
      tree orig_name = TYPE_IDENTIFIER (type);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);
      TYPE_SIZE_UNIT (marker) = size_unit;

      add_parallel_type (record, marker);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  /* Warn only when SIZE is provably strictly larger than ORIG_SIZE.  */
  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1461\f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* Recurse on the inner dimension of multi-dimensional arrays so every
     level of the nest gets related.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1538\f
55052087 1539/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
4905002b 1540 ARTIFICIAL_P is true if the type was generated by the compiler. */
27becfc8 1541
1542void
55052087 1543record_builtin_type (const char *name, tree type, bool artificial_p)
27becfc8 1544{
f3c9930a 1545 tree type_decl = build_decl (input_location,
1546 TYPE_DECL, get_identifier (name), type);
55052087 1547 DECL_ARTIFICIAL (type_decl) = artificial_p;
5bf971ee 1548 TYPE_ARTIFICIAL (type) = artificial_p;
515c6c6c 1549 gnat_pushdecl (type_decl, Empty);
27becfc8 1550
515c6c6c 1551 if (debug_hooks->type_decl)
1552 debug_hooks->type_decl (type_decl, false);
27becfc8 1553}
1554\f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.

   A fat pointer is the two-word representation of an Ada unconstrained
   array: the layout is finished without emitting debug info, since the
   field types may still be incomplete at this point.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1577
f9001da7 1578/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
27becfc8 1579 finish constructing the record or union type. If REP_LEVEL is zero, this
1580 record has no representation clause and so will be entirely laid out here.
1581 If REP_LEVEL is one, this record has a representation clause and has been
1582 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1583 this record is derived from a parent record and thus inherits its layout;
f9001da7 1584 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1585 we need to write debug information about this type. */
27becfc8 1586
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout properties were already set on entry, e.g. by a
     representation clause, so that we do not overwrite them below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the field type's Ada size (its RM size) when it tracks one,
	 otherwise fall back to the field's GCC size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  /* The size of a union is the maximum over its members.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* Guard each member's size by its discriminant qualifier; the
	     fields were reversed above so the last variant is seen first.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original order of the fields.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  /* Finally output the debug information for the type, if requested.  */
  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1793
/* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
   PARALLEL_TYPE has no context and its computation is not deferred yet, also
   propagate TYPE's context to PARALLEL_TYPE, or defer its propagation to the
   moment TYPE gets a context.  */
ae16f71d 1798
1799void
1800add_parallel_type (tree type, tree parallel_type)
1801{
1802 tree decl = TYPE_STUB_DECL (type);
1803
1804 while (DECL_PARALLEL_TYPE (decl))
1805 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1806
1807 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
ba502e2b 1808
1809 /* If PARALLEL_TYPE already has a context, we are done. */
1810 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1811 return;
1812
1813 /* Otherwise, try to get one from TYPE's context. */
1814 if (TYPE_CONTEXT (type) != NULL_TREE)
1815 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1816 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1817
1818 /* ... otherwise TYPE has not context yet. We know it will thanks to
1819 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1820 So we have nothing to do in this case. */
ae16f71d 1821}
1822
1823/* Return true if TYPE has a parallel type. */
1824
1825static bool
1826has_parallel_type (tree type)
1827{
1828 tree decl = TYPE_STUB_DECL (type);
1829
1830 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1831}
1832
f9001da7 1833/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1834 associated with it. It need not be invoked directly in most cases since
1835 finish_record_type takes care of doing so, but this can be necessary if
1836 a parallel type is to be attached to the record type. */
27becfc8 1837
void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original, with an XVU suffix
	 for variant records and XVE otherwise.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* CURPOS of the form OFFSET * FACTOR: FACTOR (adjusted for any
	     known alignment of OFFSET) is evidence of rounding.  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* CURPOS of the form OFFSET * FACTOR + ADDEND: likewise, but the
	     rounding boundary is also limited by the low bits of ADDEND.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      /* ADDEND & -ADDEND isolates its lowest set bit, an upper
		 bound on the alignment the addend can guarantee.  */
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Prepend the new field; the list is put back in order by the
	     nreverse call after the loop.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
2021
27becfc8 2022/* Utility function of above to merge LAST_SIZE, the previous size of a record
32826d65 2023 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2024 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2025 replace a value of zero with the old size. If HAS_REP is true, we take the
2026 MAX of the end position of this field with LAST_SIZE. In all other cases,
2027 we use FIRST_BIT plus SIZE. Return an expression for the size. */
27becfc8 2028
static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Regular case: the field ends at FIRST_BIT + SIZE; with a rep
	 clause, the record may already extend further, so take the MAX.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR, so merge each arm
     recursively, replacing a zero size (an empty variant) with the
     previous size of the record.  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2062
2063/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2064 related by the addition of a constant. Return that constant if so. */
2065
2066static tree
2067compute_related_constant (tree op0, tree op1)
2068{
2069 tree op0_var, op1_var;
2070 tree op0_con = split_plus (op0, &op0_var);
2071 tree op1_con = split_plus (op1, &op1_var);
2072 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2073
2074 if (operand_equal_p (op0_var, op1_var, 0))
2075 return result;
2076 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2077 return result;
2078 else
2079 return 0;
2080}
2081
2082/* Utility function of above to split a tree OP which may be a sum, into a
2083 constant part, which is returned, and a variable part, which is stored
2084 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2085 bitsizetype. */
2086
2087static tree
2088split_plus (tree in, tree *pvar)
2089{
7ad4f11d 2090 /* Strip conversions in order to ease the tree traversal and maximize the
2091 potential for constant or plus/minus discovery. We need to be careful
27becfc8 2092 to always return and set *pvar to bitsizetype trees, but it's worth
2093 the effort. */
7ad4f11d 2094 in = remove_conversions (in, false);
27becfc8 2095
2096 *pvar = convert (bitsizetype, in);
2097
2098 if (TREE_CODE (in) == INTEGER_CST)
2099 {
2100 *pvar = bitsize_zero_node;
2101 return convert (bitsizetype, in);
2102 }
2103 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2104 {
2105 tree lhs_var, rhs_var;
2106 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2107 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2108
2109 if (lhs_var == TREE_OPERAND (in, 0)
2110 && rhs_var == TREE_OPERAND (in, 1))
2111 return bitsize_zero_node;
2112
2113 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2114 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2115 }
2116 else
2117 return bitsize_zero_node;
2118}
2119\f
4cd5bb61 2120/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2121 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2122 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2123 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2124 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2125 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2126 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2127 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2128 invisible reference. */
27becfc8 2129
tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
		     bool return_unconstrained_p, bool return_by_direct_ref_p,
		     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  */
  vec<tree, va_gc> *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    vec_safe_push (param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags, so that this
     subprogram's properties do not leak onto an unrelated shared type.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
			    return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      /* Return by invisible reference is encoded in TREE_ADDRESSABLE on
	 the function type.  */
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
2160\f
2161/* Return a copy of TYPE but safe to modify in any way. */
2162
tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed: clear the
     copy's slot first, then re-set each RM value on the fresh slot.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  /* The copy is a distinct type: detach it from the original's pointer
     types and variant chain and make it its own main/canonical variant.  */
  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;
  TYPE_CANONICAL (new_type) = new_type;

  return new_type;
}
2197\f
211df513 2198/* Return a subtype of sizetype with range MIN to MAX and whose
2199 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2200 of the associated TYPE_DECL. */
27becfc8 2201
tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type and declare the type, using GNAT_NODE for
     the position of the associated TYPE_DECL.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, true, false, gnat_node);

  return type;
}
a9538d68 2214
2215/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2216 sizetype is used. */
2217
2218tree
2219create_range_type (tree type, tree min, tree max)
2220{
2221 tree range_type;
2222
2223 if (type == NULL_TREE)
2224 type = sizetype;
2225
2226 /* First build a type with the base range. */
47c154d9 2227 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2228 TYPE_MAX_VALUE (type));
a9538d68 2229
2230 /* Then set the actual range. */
6fa4bf38 2231 SET_TYPE_RM_MIN_VALUE (range_type, min);
2232 SET_TYPE_RM_MAX_VALUE (range_type, max);
a9538d68 2233
2234 return range_type;
2235}
27becfc8 2236\f
7a0ae4af 2237/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2238 NAME gives the name of the type to be used in the declaration. */
515c6c6c 2239
2240tree
7a0ae4af 2241create_type_stub_decl (tree name, tree type)
515c6c6c 2242{
7a0ae4af 2243 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
515c6c6c 2244 DECL_ARTIFICIAL (type_decl) = 1;
5bf971ee 2245 TYPE_ARTIFICIAL (type) = 1;
515c6c6c 2246 return type_decl;
2247}
2248
7a0ae4af 2249/* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2250 used in the declaration. ARTIFICIAL_P is true if the declaration was
2251 generated by the compiler. DEBUG_INFO_P is true if we need to write
2252 debug information about this type. GNAT_NODE is used for the position
2253 of the decl. */
27becfc8 2254
tree
create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
		  Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE has already been named by another TYPE_DECL.  */
  bool is_named
    = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!is_named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such a
     case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
     with the mechanism above.  */
  if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
515c6c6c 2302\f
27becfc8 2303/* Return a VAR_DECL or CONST_DECL node.
2304
7a0ae4af 2305 NAME gives the name of the variable. ASM_NAME is its assembler name
2306 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
27becfc8 2307 the GCC tree for an optional initial expression; NULL_TREE if none.
2308
2309 CONST_FLAG is true if this variable is constant, in which case we might
2310 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2311
2312 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2313 definition to be made visible outside of the current compilation unit, for
2314 instance variable definitions in a package specification.
2315
32826d65 2316 EXTERN_FLAG is true when processing an external variable declaration (as
27becfc8 2317 opposed to a definition: no storage is to be allocated for the variable).
2318
2319 STATIC_FLAG is only relevant when not at top level. In that case
2320 it indicates whether to always allocate storage to the variable.
2321
4905002b 2322 ARTIFICIAL_P is true if the variable was generated by the compiler.
2323
2324 DEBUG_INFO_P is true if we need to write debug information for it.
2325
27becfc8 2326 GNAT_NODE is used for the position of the decl. */
2327
tree
create_var_decl (tree name, tree asm_name, tree type, tree init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, bool artificial_p, bool debug_info_p,
		 struct attrib *attr_list, Node_Id gnat_node,
		 bool const_decl_allowed_p)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (init
       && gnat_types_compatible_p (type, TREE_TYPE (init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
	     != NULL_TREE
	   : TREE_CONSTANT (init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
     since we will create an associated variable.  Likewise for an external
     constant whose initializer is not absolute, because this would mean a
     global relocation in a read-only section which runs afoul of the PE-COFF
     run-time relocation mechanism.  */
  if (!debug_info_p
      || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* A volatile object is also considered to have side effects, and its
     address may be taken behind our back, hence TREE_ADDRESSABLE.  */
  if (TYPE_VOLATILE (type))
    TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }

  return var_decl;
}
2449\f
2450/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2451
2452static bool
2453aggregate_type_contains_array_p (tree type)
2454{
2455 switch (TREE_CODE (type))
2456 {
2457 case RECORD_TYPE:
2458 case UNION_TYPE:
2459 case QUAL_UNION_TYPE:
2460 {
2461 tree field;
1767a056 2462 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
27becfc8 2463 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2464 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2465 return true;
2466 return false;
2467 }
2468
2469 case ARRAY_TYPE:
2470 return true;
2471
2472 default:
2473 gcc_unreachable ();
2474 }
2475}
2476
7a0ae4af 2477/* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2478 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2479 is the specified size of the field. If POS is nonzero, it is the bit
2480 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2481 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
81c30835 2482 means we are allowed to take the address of the field; if it is negative,
2483 we should not make a bitfield, which is used by make_aligning_type. */
27becfc8 2484
2485tree
7a0ae4af 2486create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2487 int packed, int addressable)
27becfc8 2488{
7a0ae4af 2489 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
27becfc8 2490
2491 DECL_CONTEXT (field_decl) = record_type;
7a0ae4af 2492 TREE_READONLY (field_decl) = TYPE_READONLY (type);
27becfc8 2493
2494 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2495 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2496 Likewise for an aggregate without specified position that contains an
2497 array, because in this case slices of variable length of this array
2498 must be handled by GCC and variable-sized objects need to be aligned
2499 to at least a byte boundary. */
7a0ae4af 2500 if (packed && (TYPE_MODE (type) == BLKmode
27becfc8 2501 || (!pos
7a0ae4af 2502 && AGGREGATE_TYPE_P (type)
2503 && aggregate_type_contains_array_p (type))))
27becfc8 2504 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2505
2506 /* If a size is specified, use it. Otherwise, if the record type is packed
2507 compute a size to use, which may differ from the object's natural size.
2508 We always set a size in this case to trigger the checks for bitfield
2509 creation below, which is typically required when no position has been
2510 specified. */
2511 if (size)
2512 size = convert (bitsizetype, size);
2513 else if (packed == 1)
2514 {
7a0ae4af 2515 size = rm_size (type);
2516 if (TYPE_MODE (type) == BLKmode)
81c30835 2517 size = round_up (size, BITS_PER_UNIT);
27becfc8 2518 }
2519
2520 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2521 specified for two reasons: first if the size differs from the natural
2522 size. Second, if the alignment is insufficient. There are a number of
2523 ways the latter can be true.
2524
2525 We never make a bitfield if the type of the field has a nonconstant size,
2526 because no such entity requiring bitfield operations should reach here.
2527
2528 We do *preventively* make a bitfield when there might be the need for it
2529 but we don't have all the necessary information to decide, as is the case
2530 of a field with no specified position in a packed record.
2531
2532 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2533 in layout_decl or finish_record_type to clear the bit_field indication if
2534 it is in fact not needed. */
2535 if (addressable >= 0
2536 && size
2537 && TREE_CODE (size) == INTEGER_CST
7a0ae4af 2538 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2539 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2540 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
27becfc8 2541 || packed
2542 || (TYPE_ALIGN (record_type) != 0
7a0ae4af 2543 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
27becfc8 2544 {
2545 DECL_BIT_FIELD (field_decl) = 1;
2546 DECL_SIZE (field_decl) = size;
2547 if (!packed && !pos)
4880a940 2548 {
2549 if (TYPE_ALIGN (record_type) != 0
7a0ae4af 2550 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
4880a940 2551 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2552 else
7a0ae4af 2553 DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
4880a940 2554 }
27becfc8 2555 }
2556
2557 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2558
2559 /* Bump the alignment if need be, either for bitfield/packing purposes or
2560 to satisfy the type requirements if no such consideration applies. When
2561 we get the alignment from the type, indicate if this is from an explicit
2562 user request, which prevents stor-layout from lowering it later on. */
2563 {
7cfdc2f0 2564 unsigned int bit_align
27becfc8 2565 = (DECL_BIT_FIELD (field_decl) ? 1
7a0ae4af 2566 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
27becfc8 2567
2568 if (bit_align > DECL_ALIGN (field_decl))
2569 DECL_ALIGN (field_decl) = bit_align;
7a0ae4af 2570 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
27becfc8 2571 {
7a0ae4af 2572 DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
2573 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
27becfc8 2574 }
2575 }
2576
2577 if (pos)
2578 {
2579 /* We need to pass in the alignment the DECL is known to have.
2580 This is the lowest-order bit set in POS, but no more than
2581 the alignment of the record, if one is specified. Note
2582 that an alignment of 0 is taken as infinite. */
2583 unsigned int known_align;
2584
e913b5cd 2585 if (tree_fits_uhwi_p (pos))
2586 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
27becfc8 2587 else
2588 known_align = BITS_PER_UNIT;
2589
2590 if (TYPE_ALIGN (record_type)
2591 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2592 known_align = TYPE_ALIGN (record_type);
2593
2594 layout_decl (field_decl, known_align);
2595 SET_DECL_OFFSET_ALIGN (field_decl,
e913b5cd 2596 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
27becfc8 2597 : BITS_PER_UNIT);
2598 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2599 &DECL_FIELD_BIT_OFFSET (field_decl),
2600 DECL_OFFSET_ALIGN (field_decl), pos);
27becfc8 2601 }
2602
2603 /* In addition to what our caller says, claim the field is addressable if we
2604 know that its type is not suitable.
2605
2606 The field may also be "technically" nonaddressable, meaning that even if
2607 we attempt to take the field's address we will actually get the address
2608 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2609 value we have at this point is not accurate enough, so we don't account
2610 for this here and let finish_record_type decide. */
7a0ae4af 2611 if (!addressable && !type_for_nonaliased_component_p (type))
27becfc8 2612 addressable = 1;
2613
2614 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2615
2616 return field_decl;
2617}
2618\f
7a0ae4af 2619/* Return a PARM_DECL node. NAME is the name of the parameter and TYPE is
2620 its type. READONLY is true if the parameter is readonly (either an In
2621 parameter or an address of a pass-by-ref parameter). */
27becfc8 2622
2623tree
7a0ae4af 2624create_param_decl (tree name, tree type, bool readonly)
27becfc8 2625{
7a0ae4af 2626 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
27becfc8 2627
a002cb99 2628 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2629 can lead to various ABI violations. */
2630 if (targetm.calls.promote_prototypes (NULL_TREE)
7a0ae4af 2631 && INTEGRAL_TYPE_P (type)
2632 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
27becfc8 2633 {
2634 /* We have to be careful about biased types here. Make a subtype
2635 of integer_type_node with the proper biasing. */
7a0ae4af 2636 if (TREE_CODE (type) == INTEGER_TYPE
2637 && TYPE_BIASED_REPRESENTATION_P (type))
27becfc8 2638 {
a9538d68 2639 tree subtype
2640 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
211df513 2641 TREE_TYPE (subtype) = integer_type_node;
2642 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
7a0ae4af 2643 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2644 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2645 type = subtype;
27becfc8 2646 }
2647 else
7a0ae4af 2648 type = integer_type_node;
27becfc8 2649 }
2650
7a0ae4af 2651 DECL_ARG_TYPE (param_decl) = type;
27becfc8 2652 TREE_READONLY (param_decl) = readonly;
2653 return param_decl;
2654}
2655\f
081f18cf 2656/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2657 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2658 changed. GNAT_NODE is used for the position of error messages. */
27becfc8 2659
081f18cf 2660void
2661process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2662 Node_Id gnat_node)
27becfc8 2663{
081f18cf 2664 struct attrib *attr;
2665
2666 for (attr = *attr_list; attr; attr = attr->next)
2667 switch (attr->type)
27becfc8 2668 {
2669 case ATTR_MACHINE_ATTRIBUTE:
081f18cf 2670 Sloc_to_locus (Sloc (gnat_node), &input_location);
2671 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2672 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
27becfc8 2673 break;
2674
2675 case ATTR_LINK_ALIAS:
081f18cf 2676 if (!DECL_EXTERNAL (*node))
27becfc8 2677 {
081f18cf 2678 TREE_STATIC (*node) = 1;
2679 assemble_alias (*node, attr->name);
27becfc8 2680 }
2681 break;
2682
2683 case ATTR_WEAK_EXTERNAL:
2684 if (SUPPORTS_WEAK)
081f18cf 2685 declare_weak (*node);
27becfc8 2686 else
2687 post_error ("?weak declarations not supported on this target",
081f18cf 2688 attr->error_point);
27becfc8 2689 break;
2690
2691 case ATTR_LINK_SECTION:
218e3e4e 2692 if (targetm_common.have_named_sections)
27becfc8 2693 {
1a3c0b14 2694 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
081f18cf 2695 DECL_COMMON (*node) = 0;
27becfc8 2696 }
2697 else
2698 post_error ("?section attributes are not supported for this target",
081f18cf 2699 attr->error_point);
27becfc8 2700 break;
2701
2702 case ATTR_LINK_CONSTRUCTOR:
081f18cf 2703 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2704 TREE_USED (*node) = 1;
27becfc8 2705 break;
2706
2707 case ATTR_LINK_DESTRUCTOR:
081f18cf 2708 DECL_STATIC_DESTRUCTOR (*node) = 1;
2709 TREE_USED (*node) = 1;
27becfc8 2710 break;
90d3e56e 2711
2712 case ATTR_THREAD_LOCAL_STORAGE:
5e68df57 2713 set_decl_tls_model (*node, decl_default_tls_model (*node));
081f18cf 2714 DECL_COMMON (*node) = 0;
90d3e56e 2715 break;
27becfc8 2716 }
081f18cf 2717
2718 *attr_list = NULL;
27becfc8 2719}
27becfc8 2720
2721/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2722 a power of 2. */
2723
2724bool
2725value_factor_p (tree value, HOST_WIDE_INT factor)
2726{
e913b5cd 2727 if (tree_fits_uhwi_p (value))
2728 return tree_to_uhwi (value) % factor == 0;
27becfc8 2729
2730 if (TREE_CODE (value) == MULT_EXPR)
2731 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2732 || value_factor_p (TREE_OPERAND (value, 1), factor));
2733
2734 return false;
2735}
2736
11f3f0ba 2737/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2738 from the parameter association for the instantiation of a generic. We do
2739 not want to emit source location for them: the code generated for their
2740 initialization is likely to disturb debugging. */
2741
2742bool
2743renaming_from_generic_instantiation_p (Node_Id gnat_node)
2744{
2745 if (Nkind (gnat_node) != N_Defining_Identifier
2746 || !IN (Ekind (gnat_node), Object_Kind)
2747 || Comes_From_Source (gnat_node)
2748 || !Present (Renamed_Object (gnat_node)))
2749 return false;
2750
2751 /* Get the object declaration of the renamed object, if any and if the
2752 renamed object is a mere identifier. */
2753 gnat_node = Renamed_Object (gnat_node);
2754 if (Nkind (gnat_node) != N_Identifier)
2755 return false;
2756
2757 gnat_node = Entity (gnat_node);
2758 if (!Present (Parent (gnat_node)))
2759 return false;
2760
2761 gnat_node = Parent (gnat_node);
2762 return
2763 (Present (gnat_node)
2764 && Nkind (gnat_node) == N_Object_Declaration
2765 && Present (Corresponding_Generic_Association (gnat_node)));
2766}
2767
810e94f8 2768/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2769 feed it with the elaboration of GNAT_SCOPE. */
2770
2771static struct deferred_decl_context_node *
2772add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2773{
2774 struct deferred_decl_context_node *new_node;
2775
2776 new_node
2777 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2778 new_node->decl = decl;
2779 new_node->gnat_scope = gnat_scope;
2780 new_node->force_global = force_global;
2781 new_node->types.create (1);
2782 new_node->next = deferred_decl_context_queue;
2783 deferred_decl_context_queue = new_node;
2784 return new_node;
2785}
2786
2787/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2788 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2789 computed. */
2790
2791static void
2792add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2793{
2794 n->types.safe_push (type);
2795}
2796
2797/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2798 NULL_TREE if it is not available. */
2799
2800static tree
2801compute_deferred_decl_context (Entity_Id gnat_scope)
2802{
2803 tree context;
2804
2805 if (present_gnu_tree (gnat_scope))
2806 context = get_gnu_tree (gnat_scope);
2807 else
2808 return NULL_TREE;
2809
2810 if (TREE_CODE (context) == TYPE_DECL)
2811 {
2812 const tree context_type = TREE_TYPE (context);
2813
2814 /* Skip dummy types: only the final ones can appear in the context
2815 chain. */
2816 if (TYPE_DUMMY_P (context_type))
2817 return NULL_TREE;
2818
2819 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2820 chain. */
2821 else
2822 context = context_type;
2823 }
2824
2825 return context;
2826}
2827
2828/* Try to process all deferred nodes in the queue. Keep in the queue the ones
2829 that cannot be processed yet, remove the other ones. If FORCE is true,
2830 force the processing for all nodes, use the global context when nodes don't
2831 have a GNU translation. */
2832
2833void
2834process_deferred_decl_context (bool force)
2835{
2836 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2837 struct deferred_decl_context_node *node;
2838
2839 while (*it != NULL)
2840 {
2841 bool processed = false;
2842 tree context = NULL_TREE;
2843 Entity_Id gnat_scope;
2844
2845 node = *it;
2846
2847 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2848 get the first scope. */
2849 gnat_scope = node->gnat_scope;
2850 while (Present (gnat_scope))
2851 {
2852 context = compute_deferred_decl_context (gnat_scope);
2853 if (!force || context != NULL_TREE)
2854 break;
2855 gnat_scope = get_debug_scope (gnat_scope, NULL);
2856 }
2857
2858 /* Imported declarations must not be in a local context (i.e. not inside
2859 a function). */
2860 if (context != NULL_TREE && node->force_global > 0)
2861 {
2862 tree ctx = context;
2863
2864 while (ctx != NULL_TREE)
2865 {
2866 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2867 ctx = (DECL_P (ctx))
2868 ? DECL_CONTEXT (ctx)
2869 : TYPE_CONTEXT (ctx);
2870 }
2871 }
2872
2873 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2874 was no elaborated scope, use the global context. */
2875 if (force && context == NULL_TREE)
2876 context = get_global_context ();
2877
2878 if (context != NULL_TREE)
2879 {
2880 tree t;
2881 int i;
2882
2883 DECL_CONTEXT (node->decl) = context;
2884
2885 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2886 ..._TYPE nodes. */
2887 FOR_EACH_VEC_ELT (node->types, i, t)
2888 {
ba502e2b 2889 gnat_set_type_context (t, context);
810e94f8 2890 }
2891 processed = true;
2892 }
2893
2894 /* If this node has been successfuly processed, remove it from the
2895 queue. Then move to the next node. */
2896 if (processed)
2897 {
2898 *it = node->next;
2899 node->types.release ();
2900 free (node);
2901 }
2902 else
2903 it = &node->next;
2904 }
2905}
2906
2907
819ab09a 2908/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2909
2910static unsigned int
2911scale_by_factor_of (tree expr, unsigned int value)
2912{
4a53d91c 2913 unsigned HOST_WIDE_INT addend = 0;
2914 unsigned HOST_WIDE_INT factor = 1;
2915
2916 /* Peel conversions around EXPR and try to extract bodies from function
2917 calls: it is possible to get the scale factor from size functions. */
819ab09a 2918 expr = remove_conversions (expr, true);
4a53d91c 2919 if (TREE_CODE (expr) == CALL_EXPR)
2920 expr = maybe_inline_call_in_expr (expr);
2921
2922 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
2923 multiple of the scale factor we are looking for. */
2924 if (TREE_CODE (expr) == PLUS_EXPR
2925 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2926 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
2927 {
2928 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2929 expr = TREE_OPERAND (expr, 0);
2930 }
819ab09a 2931
2932 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2933 corresponding to the number of trailing zeros of the mask. */
2934 if (TREE_CODE (expr) == BIT_AND_EXPR
2935 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2936 {
f9ae6f95 2937 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
819ab09a 2938 unsigned int i = 0;
2939
2940 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2941 {
2942 mask >>= 1;
4a53d91c 2943 factor *= 2;
819ab09a 2944 i++;
2945 }
2946 }
2947
4a53d91c 2948 /* If the addend is not a multiple of the factor we found, give up. In
2949 theory we could find a smaller common factor but it's useless for our
2950 needs. This situation arises when dealing with a field F1 with no
2951 alignment requirement but that is following a field F2 with such
2952 requirements. As long as we have F2's offset, we don't need alignment
2953 information to compute F1's. */
2954 if (addend % factor != 0)
2955 factor = 1;
2956
2957 return factor * value;
819ab09a 2958}
2959
2cb54d0d 2960/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
27becfc8 2961 unless we can prove these 2 fields are laid out in such a way that no gap
2962 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2963 is the distance in bits between the end of PREV_FIELD and the starting
2964 position of CURR_FIELD. It is ignored if null. */
2965
2966static bool
2967potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2968{
2969 /* If this is the first field of the record, there cannot be any gap */
2970 if (!prev_field)
2971 return false;
2972
c6ac288c 2973 /* If the previous field is a union type, then return false: The only
27becfc8 2974 time when such a field is not the last field of the record is when
2975 there are other components at fixed positions after it (meaning there
2976 was a rep clause for every field), in which case we don't want the
2977 alignment constraint to override them. */
2978 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2979 return false;
2980
2981 /* If the distance between the end of prev_field and the beginning of
2982 curr_field is constant, then there is a gap if the value of this
2983 constant is not null. */
e913b5cd 2984 if (offset && tree_fits_uhwi_p (offset))
27becfc8 2985 return !integer_zerop (offset);
2986
2987 /* If the size and position of the previous field are constant,
2988 then check the sum of this size and position. There will be a gap
2989 iff it is not multiple of the current field alignment. */
e913b5cd 2990 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2991 && tree_fits_uhwi_p (bit_position (prev_field)))
2992 return ((tree_to_uhwi (bit_position (prev_field))
2993 + tree_to_uhwi (DECL_SIZE (prev_field)))
27becfc8 2994 % DECL_ALIGN (curr_field) != 0);
2995
2996 /* If both the position and size of the previous field are multiples
2997 of the current field alignment, there cannot be any gap. */
2998 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2999 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3000 return false;
3001
3002 /* Fallback, return that there may be a potential gap */
3003 return true;
3004}
3005
7a0ae4af 3006/* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3007 the decl. */
27becfc8 3008
3009tree
7a0ae4af 3010create_label_decl (tree name, Node_Id gnat_node)
27becfc8 3011{
7e9d30ae 3012 tree label_decl
7a0ae4af 3013 = build_decl (input_location, LABEL_DECL, name, void_type_node);
27becfc8 3014
7e9d30ae 3015 DECL_MODE (label_decl) = VOIDmode;
3016
3017 /* Add this decl to the current binding level. */
3018 gnat_pushdecl (label_decl, gnat_node);
27becfc8 3019
3020 return label_decl;
3021}
3022\f
7a0ae4af 3023/* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3024 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3025 the list of its parameters (a list of PARM_DECL nodes chained through the
3026 DECL_CHAIN field).
27becfc8 3027
4905002b 3028 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG and ATTR_LIST are used to set the
3029 appropriate fields in the FUNCTION_DECL.
3030
3031 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3032
3033 DEBUG_INFO_P is true if we need to write debug information for it.
3034
3035 GNAT_NODE is used for the position of the decl. */
27becfc8 3036
3037tree
7a0ae4af 3038create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3039 enum inline_status_t inline_status, bool public_flag,
3040 bool extern_flag, bool artificial_p, bool debug_info_p,
3041 struct attrib *attr_list, Node_Id gnat_node)
27becfc8 3042{
7a0ae4af 3043 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3044 tree result_decl
3045 = build_decl (input_location, RESULT_DECL, NULL_TREE, TREE_TYPE (type));
2cb54d0d 3046 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
27becfc8 3047
4905002b 3048 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
2cb54d0d 3049 DECL_EXTERNAL (subprog_decl) = extern_flag;
00b45d95 3050
3051 switch (inline_status)
3052 {
3053 case is_suppressed:
3054 DECL_UNINLINABLE (subprog_decl) = 1;
3055 break;
3056
3057 case is_disabled:
3058 break;
3059
9fac98bb 3060 case is_required:
3061 if (Back_End_Inlining)
6fa4bf38 3062 decl_attributes (&subprog_decl,
3063 tree_cons (get_identifier ("always_inline"),
3064 NULL_TREE, NULL_TREE),
3065 ATTR_FLAG_TYPE_IN_PLACE);
3066
9fac98bb 3067 /* ... fall through ... */
3068
00b45d95 3069 case is_enabled:
3070 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
4905002b 3071 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
00b45d95 3072 break;
3073
3074 default:
3075 gcc_unreachable ();
3076 }
2cb54d0d 3077
4905002b 3078 if (!debug_info_p)
3079 DECL_IGNORED_P (subprog_decl) = 1;
3080
2cb54d0d 3081 TREE_PUBLIC (subprog_decl) = public_flag;
7a0ae4af 3082 TREE_READONLY (subprog_decl) = TYPE_READONLY (type);
3083 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (type);
3084 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (type);
27becfc8 3085
4cd5bb61 3086 DECL_ARTIFICIAL (result_decl) = 1;
3087 DECL_IGNORED_P (result_decl) = 1;
7a0ae4af 3088 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
4cd5bb61 3089 DECL_RESULT (subprog_decl) = result_decl;
27becfc8 3090
7ebb8c82 3091 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3092
3093 /* Add this decl to the current binding level. */
3094 gnat_pushdecl (subprog_decl, gnat_node);
3095
27becfc8 3096 if (asm_name)
3097 {
7ebb8c82 3098 /* Let the target mangle the name if this isn't a verbatim asm. */
3099 if (*IDENTIFIER_POINTER (asm_name) != '*')
3100 asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);
3101
27becfc8 3102 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3103
3104 /* The expand_main_function circuitry expects "main_identifier_node" to
3105 designate the DECL_NAME of the 'main' entry point, in turn expected
3106 to be declared as the "main" function literally by default. Ada
3107 program entry points are typically declared with a different name
3108 within the binder generated file, exported as 'main' to satisfy the
90aa6d7d 3109 system expectations. Force main_identifier_node in this case. */
27becfc8 3110 if (asm_name == main_identifier_node)
90aa6d7d 3111 DECL_NAME (subprog_decl) = main_identifier_node;
27becfc8 3112 }
3113
27becfc8 3114 /* Output the assembler code and/or RTL for the declaration. */
3115 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3116
3117 return subprog_decl;
3118}
3119\f
3120/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3121 body. This routine needs to be invoked before processing the declarations
3122 appearing in the subprogram. */
3123
3124void
3125begin_subprog_body (tree subprog_decl)
3126{
3127 tree param_decl;
3128
27becfc8 3129 announce_function (subprog_decl);
3130
aaaf92e8 3131 /* This function is being defined. */
3132 TREE_STATIC (subprog_decl) = 1;
3133
7a4047d6 3134 /* The failure of this assertion will likely come from a wrong context for
3135 the subprogram body, e.g. another procedure for a procedure declared at
3136 library level. */
3137 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3138
ac45dde2 3139 current_function_decl = subprog_decl;
3140
27becfc8 3141 /* Enter a new binding level and show that all the parameters belong to
3142 this function. */
3143 gnat_pushlevel ();
db72a63f 3144
27becfc8 3145 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1767a056 3146 param_decl = DECL_CHAIN (param_decl))
27becfc8 3147 DECL_CONTEXT (param_decl) = subprog_decl;
3148
3149 make_decl_rtl (subprog_decl);
27becfc8 3150}
3151
c97bd6af 3152/* Finish translating the current subprogram and set its BODY. */
27becfc8 3153
3154void
bfec3452 3155end_subprog_body (tree body)
27becfc8 3156{
3157 tree fndecl = current_function_decl;
3158
a616b634 3159 /* Attach the BLOCK for this level to the function and pop the level. */
27becfc8 3160 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3161 DECL_INITIAL (fndecl) = current_binding_level->block;
3162 gnat_poplevel ();
3163
27becfc8 3164 /* Mark the RESULT_DECL as being in this subprogram. */
3165 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3166
04dd9724 3167 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3168 if (TREE_CODE (body) == BIND_EXPR)
3169 {
3170 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3171 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3172 }
3173
27becfc8 3174 DECL_SAVED_TREE (fndecl) = body;
3175
eac916f9 3176 current_function_decl = decl_function_context (fndecl);
c97bd6af 3177}
3178
3179/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
27becfc8 3180
c97bd6af 3181void
3182rest_of_subprog_body_compilation (tree subprog_decl)
3183{
27becfc8 3184 /* We cannot track the location of errors past this point. */
3185 error_gnat_node = Empty;
3186
3187 /* If we're only annotating types, don't actually compile this function. */
3188 if (type_annotate_only)
3189 return;
3190
bfec3452 3191 /* Dump functions before gimplification. */
c97bd6af 3192 dump_function (TDI_original, subprog_decl);
bfec3452 3193
eac916f9 3194 if (!decl_function_context (subprog_decl))
35ee1c66 3195 cgraph_node::finalize_function (subprog_decl, false);
27becfc8 3196 else
3197 /* Register this function with cgraph just far enough to get it
3198 added to our parent's nested function list. */
727d4825 3199 (void) cgraph_node::get_create (subprog_decl);
27becfc8 3200}
3201
27becfc8 3202tree
3203gnat_builtin_function (tree decl)
3204{
3205 gnat_pushdecl (decl, Empty);
3206 return decl;
3207}
3208
3209/* Return an integer type with the number of bits of precision given by
3210 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3211 it is a signed type. */
3212
3213tree
3214gnat_type_for_size (unsigned precision, int unsignedp)
3215{
3216 tree t;
3217 char type_name[20];
3218
3219 if (precision <= 2 * MAX_BITS_PER_WORD
3220 && signed_and_unsigned_types[precision][unsignedp])
3221 return signed_and_unsigned_types[precision][unsignedp];
3222
3223 if (unsignedp)
3224 t = make_unsigned_type (precision);
3225 else
3226 t = make_signed_type (precision);
3227
3228 if (precision <= 2 * MAX_BITS_PER_WORD)
3229 signed_and_unsigned_types[precision][unsignedp] = t;
3230
3231 if (!TYPE_NAME (t))
3232 {
84f43acc 3233 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
27becfc8 3234 TYPE_NAME (t) = get_identifier (type_name);
3235 }
3236
3237 return t;
3238}
3239
3240/* Likewise for floating-point types. */
3241
3242static tree
3754d046 3243float_type_for_precision (int precision, machine_mode mode)
27becfc8 3244{
3245 tree t;
3246 char type_name[20];
3247
3248 if (float_types[(int) mode])
3249 return float_types[(int) mode];
3250
3251 float_types[(int) mode] = t = make_node (REAL_TYPE);
3252 TYPE_PRECISION (t) = precision;
3253 layout_type (t);
3254
3255 gcc_assert (TYPE_MODE (t) == mode);
3256 if (!TYPE_NAME (t))
3257 {
3258 sprintf (type_name, "FLOAT_%d", precision);
3259 TYPE_NAME (t) = get_identifier (type_name);
3260 }
3261
3262 return t;
3263}
3264
3265/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3266 an unsigned type; otherwise a signed type is returned. */
3267
3268tree
3754d046 3269gnat_type_for_mode (machine_mode mode, int unsignedp)
27becfc8 3270{
3271 if (mode == BLKmode)
3272 return NULL_TREE;
21100898 3273
3274 if (mode == VOIDmode)
27becfc8 3275 return void_type_node;
21100898 3276
3277 if (COMPLEX_MODE_P (mode))
27becfc8 3278 return NULL_TREE;
21100898 3279
3280 if (SCALAR_FLOAT_MODE_P (mode))
27becfc8 3281 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
21100898 3282
3283 if (SCALAR_INT_MODE_P (mode))
27becfc8 3284 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
21100898 3285
3286 if (VECTOR_MODE_P (mode))
3287 {
3754d046 3288 machine_mode inner_mode = GET_MODE_INNER (mode);
21100898 3289 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3290 if (inner_type)
3291 return build_vector_type_for_mode (inner_type, mode);
3292 }
3293
3294 return NULL_TREE;
27becfc8 3295}
3296
3297/* Return the unsigned version of a TYPE_NODE, a scalar type. */
3298
3299tree
3300gnat_unsigned_type (tree type_node)
3301{
3302 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3303
3304 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3305 {
3306 type = copy_node (type);
3307 TREE_TYPE (type) = type_node;
3308 }
3309 else if (TREE_TYPE (type_node)
3310 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3311 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3312 {
3313 type = copy_node (type);
3314 TREE_TYPE (type) = TREE_TYPE (type_node);
3315 }
3316
3317 return type;
3318}
3319
3320/* Return the signed version of a TYPE_NODE, a scalar type. */
3321
3322tree
3323gnat_signed_type (tree type_node)
3324{
3325 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3326
3327 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3328 {
3329 type = copy_node (type);
3330 TREE_TYPE (type) = type_node;
3331 }
3332 else if (TREE_TYPE (type_node)
3333 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3334 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3335 {
3336 type = copy_node (type);
3337 TREE_TYPE (type) = TREE_TYPE (type_node);
3338 }
3339
3340 return type;
3341}
3342
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion: same main variant.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here, so the tree codes must
     agree before any further comparison.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type, i.e. same tree code and same
     precision.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  The domains may be shared nodes,
     or distinct nodes with identical constant bounds; the component types may
     be identical, or nested arrays that are themselves compatible (this makes
     the check recursive for multi-dimensional arrays of arrays).  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  return 0;
}
47c154d9 3384
c97bd6af 3385/* Return true if EXPR is a useless type conversion. */
3386
3387bool
3388gnat_useless_type_conversion (tree expr)
3389{
3390 if (CONVERT_EXPR_P (expr)
3391 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3392 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3393 return gnat_types_compatible_p (TREE_TYPE (expr),
3394 TREE_TYPE (TREE_OPERAND (expr, 0)));
3395
3396 return false;
3397}
3398
47c154d9 3399/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3400
3401bool
3402fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3403 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3404{
3405 return TYPE_CI_CO_LIST (t) == cico_list
3406 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3407 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3408 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3409}
27becfc8 3410\f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The expression is rebuilt bottom-up; the polarity (MAX_P) is flipped
   for operands of anti-monotonic operations, e.g. the operand of a
   NEGATE_EXPR or the second operand of a MINUS_EXPR.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants contain no discriminant references,
	 so they are returned unchanged.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* First try to inline the call; if that succeeds, recurse on
	     the resulting expression.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument replaced by
	     its max/min value.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  /* Recurse with MAX_P true to fold the bound itself.  */
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      /* A comparison yields 0 or 1, so its extrema are known exactly.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* NEGATE_EXPR is anti-monotonic: flip the polarity for its operand.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* A SAVE_EXPR must not be re-expanded: keep it as-is.  */
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  /* Only the value of a COMPOUND_EXPR (its second operand)
	     contributes to the size.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  /* For a conditional, take the max (or min) over both arms since
	     the condition may involve a discriminant.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3546\f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to reach the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* Use the explicit bound list if the (possibly packed) array has one.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The template fields come in (min, max) pairs, hence the double
     DECL_CHAIN step per iteration; BOUND_LIST and ARRAY_TYPE advance in
     lockstep with the pairs.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3609\f
fc07fe6f 3610/* Return true if TYPE is suitable for the element type of a vector. */
3611
3612static bool
3613type_for_vector_element_p (tree type)
3614{
3754d046 3615 machine_mode mode;
fc07fe6f 3616
3617 if (!INTEGRAL_TYPE_P (type)
3618 && !SCALAR_FLOAT_TYPE_P (type)
3619 && !FIXED_POINT_TYPE_P (type))
3620 return false;
3621
3622 mode = TYPE_MODE (type);
3623 if (GET_MODE_CLASS (mode) != MODE_INT
3624 && !SCALAR_FLOAT_MODE_P (mode)
3625 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3626 return false;
3627
3628 return true;
3629}
3630
3631/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3632 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3633 attribute declaration and want to issue error messages on failure. */
3634
3635static tree
3636build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3637{
3638 unsigned HOST_WIDE_INT size_int, inner_size_int;
3639 int nunits;
3640
3641 /* Silently punt on variable sizes. We can't make vector types for them,
3642 need to ignore them on front-end generated subtypes of unconstrained
3643 base types, and this attribute is for binding implementors, not end
3644 users, so we should never get there from legitimate explicit uses. */
3645 if (!tree_fits_uhwi_p (size))
3646 return NULL_TREE;
3647 size_int = tree_to_uhwi (size);
3648
3649 if (!type_for_vector_element_p (inner_type))
3650 {
3651 if (attribute)
3652 error ("invalid element type for attribute %qs",
3653 IDENTIFIER_POINTER (attribute));
3654 return NULL_TREE;
3655 }
3656 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3657
3658 if (size_int % inner_size_int)
3659 {
3660 if (attribute)
3661 error ("vector size not an integral multiple of component size");
3662 return NULL_TREE;
3663 }
3664
3665 if (size_int == 0)
3666 {
3667 if (attribute)
3668 error ("zero vector size");
3669 return NULL_TREE;
3670 }
3671
3672 nunits = size_int / inner_size_int;
3673 if (nunits & (nunits - 1))
3674 {
3675 if (attribute)
3676 error ("number of components of vector not a power of two");
3677 return NULL_TREE;
3678 }
3679
3680 return build_vector_type (inner_type, nunits);
3681}
3682
3683/* Return a vector type whose representative array type is ARRAY_TYPE, or
3684 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3685 processing the attribute and want to issue error messages on failure. */
3686
3687static tree
3688build_vector_type_for_array (tree array_type, tree attribute)
3689{
3690 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3691 TYPE_SIZE_UNIT (array_type),
3692 attribute);
3693 if (!vector_type)
3694 return NULL_TREE;
3695
3696 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3697 return vector_type;
3698}
3699\f
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
		       bool debug_info_p)
{
  tree decl;
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first, followed by the array data.  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  decl = create_type_decl (name, type, true, debug_info_p, Empty);

  /* template_type will not be used elsewhere than here, so to keep the debug
     info clean and in order to avoid scoping issues, make decl its
     context.  */
  gnat_set_type_context (template_type, decl);

  return type;
}
3736
3737/* Same, taking a thin or fat pointer type instead of a template type. */
3738
3739tree
3740build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
8c77dd48 3741 tree name, bool debug_info_p)
27becfc8 3742{
3743 tree template_type;
3744
a98f6bec 3745 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
27becfc8 3746
3747 template_type
a98f6bec 3748 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
1767a056 3749 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
27becfc8 3750 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
8c77dd48 3751
3752 return
3753 build_unc_object_type (template_type, object_type, name, debug_info_p);
27becfc8 3754}
27becfc8 3755\f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end of NEW_TYPE's pointer list.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them: every variant of every chained pointer must
	 designate NEW_TYPE; cached null fat-pointer constants must be
	 retargeted as well.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end, likewise.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: pointer to the template (bounds).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
3885\f
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the null-bounds constant cached on the pointer-to-template
	 type, building and caching it on first use.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against double evaluation. */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, NULL_TREE, field,
						   false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, NULL_TREE,
						      DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
3991\f
27becfc8 3992/* Create an expression whose value is that of EXPR,
3993 converted to type TYPE. The TREE_TYPE of the value
3994 is always TYPE. This function implements all reasonable
3995 conversions; callers should filter out those that are
3996 not permitted by the language being compiled. */
3997
3998tree
3999convert (tree type, tree expr)
4000{
27becfc8 4001 tree etype = TREE_TYPE (expr);
4002 enum tree_code ecode = TREE_CODE (etype);
2b161576 4003 enum tree_code code = TREE_CODE (type);
27becfc8 4004
2b161576 4005 /* If the expression is already of the right type, we are done. */
4006 if (etype == type)
27becfc8 4007 return expr;
4008
4009 /* If both input and output have padding and are of variable size, do this
4010 as an unchecked conversion. Likewise if one is a mere variant of the
4011 other, so we avoid a pointless unpad/repad sequence. */
4012 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
a98f6bec 4013 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
27becfc8 4014 && (!TREE_CONSTANT (TYPE_SIZE (type))
4015 || !TREE_CONSTANT (TYPE_SIZE (etype))
76cb9822 4016 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
27becfc8 4017 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4018 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4019 ;
4020
9fb1518c 4021 /* If the output type has padding, convert to the inner type and make a
4022 constructor to build the record, unless a variable size is involved. */
a98f6bec 4023 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
27becfc8 4024 {
f1f41a6c 4025 vec<constructor_elt, va_gc> *v;
5cd86571 4026
27becfc8 4027 /* If we previously converted from another type and our type is
4028 of variable size, remove the conversion to avoid the need for
9fb1518c 4029 variable-sized temporaries. Likewise for a conversion between
27becfc8 4030 original and packable version. */
4031 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4032 && (!TREE_CONSTANT (TYPE_SIZE (type))
4033 || (ecode == RECORD_TYPE
4034 && TYPE_NAME (etype)
4035 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4036 expr = TREE_OPERAND (expr, 0);
4037
4038 /* If we are just removing the padding from expr, convert the original
4039 object if we have variable size in order to avoid the need for some
9fb1518c 4040 variable-sized temporaries. Likewise if the padding is a variant
27becfc8 4041 of the other, so we avoid a pointless unpad/repad sequence. */
4042 if (TREE_CODE (expr) == COMPONENT_REF
27becfc8 4043 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4044 && (!TREE_CONSTANT (TYPE_SIZE (type))
76cb9822 4045 || TYPE_MAIN_VARIANT (type)
4046 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
27becfc8 4047 || (ecode == RECORD_TYPE
4048 && TYPE_NAME (etype)
4049 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4050 return convert (type, TREE_OPERAND (expr, 0));
4051
50eca4c8 4052 /* If the inner type is of self-referential size and the expression type
4053 is a record, do this as an unchecked conversion. But first pad the
4054 expression if possible to have the same size on both sides. */
2b161576 4055 if (ecode == RECORD_TYPE
9fb1518c 4056 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
50eca4c8 4057 {
2ca11b4d 4058 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
50eca4c8 4059 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
2ca11b4d 4060 false, false, false, true),
4061 expr);
50eca4c8 4062 return unchecked_convert (type, expr, false);
4063 }
27becfc8 4064
9fb1518c 4065 /* If we are converting between array types with variable size, do the
4066 final conversion as an unchecked conversion, again to avoid the need
4067 for some variable-sized temporaries. If valid, this conversion is
4068 very likely purely technical and without real effects. */
2b161576 4069 if (ecode == ARRAY_TYPE
9fb1518c 4070 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4071 && !TREE_CONSTANT (TYPE_SIZE (etype))
4072 && !TREE_CONSTANT (TYPE_SIZE (type)))
4073 return unchecked_convert (type,
4074 convert (TREE_TYPE (TYPE_FIELDS (type)),
4075 expr),
4076 false);
4077
f1f41a6c 4078 vec_alloc (v, 1);
5cd86571 4079 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4080 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4081 return gnat_build_constructor (type, v);
27becfc8 4082 }
4083
4084 /* If the input type has padding, remove it and convert to the output type.
4085 The conditions ordering is arranged to ensure that the output type is not
4086 a padding type here, as it is not clear whether the conversion would
4087 always be correct if this was to happen. */
a98f6bec 4088 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
27becfc8 4089 {
4090 tree unpadded;
4091
4092 /* If we have just converted to this padded type, just get the
4093 inner expression. */
cdbc31ab 4094 if (TREE_CODE (expr) == CONSTRUCTOR)
4095 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
27becfc8 4096
4097 /* Otherwise, build an explicit component reference. */
4098 else
4099 unpadded
4100 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4101
4102 return convert (type, unpadded);
4103 }
4104
4105 /* If the input is a biased type, adjust first. */
4106 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4107 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
6fa4bf38 4108 fold_convert (TREE_TYPE (etype), expr),
27becfc8 4109 fold_convert (TREE_TYPE (etype),
6fa4bf38 4110 TYPE_MIN_VALUE (etype))));
27becfc8 4111
4112 /* If the input is a justified modular type, we need to extract the actual
4113 object before converting it to any other type with the exceptions of an
4114 unconstrained array or of a mere type variant. It is useful to avoid the
4115 extraction and conversion in the type variant case because it could end
4116 up replacing a VAR_DECL expr by a constructor and we might be about the
4117 take the address of the result. */
4118 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4119 && code != UNCONSTRAINED_ARRAY_TYPE
4120 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4121 return convert (type, build_component_ref (expr, NULL_TREE,
4122 TYPE_FIELDS (etype), false));
4123
4124 /* If converting to a type that contains a template, convert to the data
4125 type and then build the template. */
4126 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4127 {
1767a056 4128 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
f1f41a6c 4129 vec<constructor_elt, va_gc> *v;
4130 vec_alloc (v, 2);
27becfc8 4131
4132 /* If the source already has a template, get a reference to the
4133 associated array only, as we are going to rebuild a template
4134 for the target type anyway. */
4135 expr = maybe_unconstrained_array (expr);
4136
5cd86571 4137 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4138 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4139 obj_type, NULL_TREE));
c70df25e 4140 if (expr)
4141 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4142 convert (obj_type, expr));
5cd86571 4143 return gnat_build_constructor (type, v);
27becfc8 4144 }
4145
0b1f7790 4146 /* There are some cases of expressions that we process specially. */
27becfc8 4147 switch (TREE_CODE (expr))
4148 {
4149 case ERROR_MARK:
4150 return expr;
4151
4152 case NULL_EXPR:
4153 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4154 conversion in gnat_expand_expr. NULL_EXPR does not represent
 4155 an actual value, so no conversion is needed. */
4156 expr = copy_node (expr);
4157 TREE_TYPE (expr) = type;
4158 return expr;
4159
4160 case STRING_CST:
4161 /* If we are converting a STRING_CST to another constrained array type,
4162 just make a new one in the proper type. */
4163 if (code == ecode && AGGREGATE_TYPE_P (etype)
4164 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4165 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4166 {
4167 expr = copy_node (expr);
4168 TREE_TYPE (expr) = type;
4169 return expr;
4170 }
4171 break;
4172
52dd2567 4173 case VECTOR_CST:
62092ba6 4174 /* If we are converting a VECTOR_CST to a mere type variant, just make
52dd2567 4175 a new one in the proper type. */
4176 if (code == ecode && gnat_types_compatible_p (type, etype))
4177 {
4178 expr = copy_node (expr);
4179 TREE_TYPE (expr) = type;
4180 return expr;
4181 }
4182
27becfc8 4183 case CONSTRUCTOR:
62092ba6 4184 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4185 another padding type around the same type, just make a new one in
4186 the proper type. */
4187 if (code == ecode
4188 && (gnat_types_compatible_p (type, etype)
4189 || (code == RECORD_TYPE
4190 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4191 && TREE_TYPE (TYPE_FIELDS (type))
4192 == TREE_TYPE (TYPE_FIELDS (etype)))))
27becfc8 4193 {
4194 expr = copy_node (expr);
4195 TREE_TYPE (expr) = type;
f1f41a6c 4196 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
27becfc8 4197 return expr;
4198 }
4199
e568189f 4200 /* Likewise for a conversion between original and packable version, or
4201 conversion between types of the same size and with the same list of
4202 fields, but we have to work harder to preserve type consistency. */
27becfc8 4203 if (code == ecode
4204 && code == RECORD_TYPE
e568189f 4205 && (TYPE_NAME (type) == TYPE_NAME (etype)
4206 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4207
27becfc8 4208 {
f1f41a6c 4209 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4210 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4211 vec<constructor_elt, va_gc> *v;
4212 vec_alloc (v, len);
27becfc8 4213 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4214 unsigned HOST_WIDE_INT idx;
4215 tree index, value;
4216
d515c104 4217 /* Whether we need to clear TREE_CONSTANT et al. on the output
4218 constructor when we convert in place. */
4219 bool clear_constant = false;
4220
27becfc8 4221 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4222 {
62092ba6 4223 /* Skip the missing fields in the CONSTRUCTOR. */
4224 while (efield && field && !SAME_FIELD_P (efield, index))
4225 {
4226 efield = DECL_CHAIN (efield);
4227 field = DECL_CHAIN (field);
4228 }
e568189f 4229 /* The field must be the same. */
62092ba6 4230 if (!(efield && field && SAME_FIELD_P (efield, field)))
27becfc8 4231 break;
62092ba6 4232 constructor_elt elt
4233 = {field, convert (TREE_TYPE (field), value)};
f1f41a6c 4234 v->quick_push (elt);
d515c104 4235
4236 /* If packing has made this field a bitfield and the input
4237 value couldn't be emitted statically any more, we need to
4238 clear TREE_CONSTANT on our output. */
9bfb1138 4239 if (!clear_constant
4240 && TREE_CONSTANT (expr)
d515c104 4241 && !CONSTRUCTOR_BITFIELD_P (efield)
4242 && CONSTRUCTOR_BITFIELD_P (field)
4243 && !initializer_constant_valid_for_bitfield_p (value))
4244 clear_constant = true;
4245
1767a056 4246 efield = DECL_CHAIN (efield);
4247 field = DECL_CHAIN (field);
27becfc8 4248 }
4249
d515c104 4250 /* If we have been able to match and convert all the input fields
4251 to their output type, convert in place now. We'll fallback to a
4252 view conversion downstream otherwise. */
27becfc8 4253 if (idx == len)
4254 {
4255 expr = copy_node (expr);
4256 TREE_TYPE (expr) = type;
4257 CONSTRUCTOR_ELTS (expr) = v;
d515c104 4258 if (clear_constant)
9bfb1138 4259 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
27becfc8 4260 return expr;
4261 }
4262 }
52dd2567 4263
4264 /* Likewise for a conversion between array type and vector type with a
4265 compatible representative array. */
4266 else if (code == VECTOR_TYPE
4267 && ecode == ARRAY_TYPE
4268 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4269 etype))
4270 {
f1f41a6c 4271 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4272 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4273 vec<constructor_elt, va_gc> *v;
52dd2567 4274 unsigned HOST_WIDE_INT ix;
4275 tree value;
4276
4277 /* Build a VECTOR_CST from a *constant* array constructor. */
4278 if (TREE_CONSTANT (expr))
4279 {
4280 bool constant_p = true;
4281
4282 /* Iterate through elements and check if all constructor
4283 elements are *_CSTs. */
4284 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4285 if (!CONSTANT_CLASS_P (value))
4286 {
4287 constant_p = false;
4288 break;
4289 }
4290
4291 if (constant_p)
4292 return build_vector_from_ctor (type,
4293 CONSTRUCTOR_ELTS (expr));
4294 }
4295
4296 /* Otherwise, build a regular vector constructor. */
f1f41a6c 4297 vec_alloc (v, len);
52dd2567 4298 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4299 {
e82e4eb5 4300 constructor_elt elt = {NULL_TREE, value};
f1f41a6c 4301 v->quick_push (elt);
52dd2567 4302 }
4303 expr = copy_node (expr);
4304 TREE_TYPE (expr) = type;
4305 CONSTRUCTOR_ELTS (expr) = v;
4306 return expr;
4307 }
27becfc8 4308 break;
4309
4310 case UNCONSTRAINED_ARRAY_REF:
0b1f7790 4311 /* First retrieve the underlying array. */
4312 expr = maybe_unconstrained_array (expr);
4313 etype = TREE_TYPE (expr);
4314 ecode = TREE_CODE (etype);
4315 break;
27becfc8 4316
4317 case VIEW_CONVERT_EXPR:
4318 {
4319 /* GCC 4.x is very sensitive to type consistency overall, and view
4320 conversions thus are very frequent. Even though just "convert"ing
4321 the inner operand to the output type is fine in most cases, it
4322 might expose unexpected input/output type mismatches in special
4323 circumstances so we avoid such recursive calls when we can. */
4324 tree op0 = TREE_OPERAND (expr, 0);
4325
4326 /* If we are converting back to the original type, we can just
4327 lift the input conversion. This is a common occurrence with
4328 switches back-and-forth amongst type variants. */
4329 if (type == TREE_TYPE (op0))
4330 return op0;
4331
52dd2567 4332 /* Otherwise, if we're converting between two aggregate or vector
4333 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4334 target type in place or to just convert the inner expression. */
4335 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4336 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
27becfc8 4337 {
4338 /* If we are converting between mere variants, we can just
4339 substitute the VIEW_CONVERT_EXPR in place. */
4340 if (gnat_types_compatible_p (type, etype))
4341 return build1 (VIEW_CONVERT_EXPR, type, op0);
4342
4343 /* Otherwise, we may just bypass the input view conversion unless
4344 one of the types is a fat pointer, which is handled by
4345 specialized code below which relies on exact type matching. */
a98f6bec 4346 else if (!TYPE_IS_FAT_POINTER_P (type)
4347 && !TYPE_IS_FAT_POINTER_P (etype))
27becfc8 4348 return convert (type, op0);
4349 }
d949134d 4350
4351 break;
27becfc8 4352 }
27becfc8 4353
27becfc8 4354 default:
4355 break;
4356 }
4357
4358 /* Check for converting to a pointer to an unconstrained array. */
a98f6bec 4359 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
27becfc8 4360 return convert_to_fat_pointer (type, expr);
4361
52dd2567 4362 /* If we are converting between two aggregate or vector types that are mere
4363 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4364 to a vector type from its representative array type. */
4365 else if ((code == ecode
4366 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4367 && gnat_types_compatible_p (type, etype))
4368 || (code == VECTOR_TYPE
4369 && ecode == ARRAY_TYPE
4370 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4371 etype)))
27becfc8 4372 return build1 (VIEW_CONVERT_EXPR, type, expr);
4373
cfc3dd35 4374 /* If we are converting between tagged types, try to upcast properly. */
4375 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4376 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4377 {
4378 tree child_etype = etype;
4379 do {
4380 tree field = TYPE_FIELDS (child_etype);
4381 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4382 return build_component_ref (expr, NULL_TREE, field, false);
4383 child_etype = TREE_TYPE (field);
4384 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4385 }
4386
a88c8608 4387 /* If we are converting from a smaller form of record type back to it, just
4388 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4389 size on both sides. */
4390 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4391 && smaller_form_type_p (etype, type))
4392 {
4393 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4394 false, false, false, true),
4395 expr);
4396 return build1 (VIEW_CONVERT_EXPR, type, expr);
4397 }
4398
27becfc8 4399 /* In all other cases of related types, make a NOP_EXPR. */
22582d86 4400 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
27becfc8 4401 return fold_convert (type, expr);
4402
4403 switch (code)
4404 {
4405 case VOID_TYPE:
4406 return fold_build1 (CONVERT_EXPR, type, expr);
4407
27becfc8 4408 case INTEGER_TYPE:
4409 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4410 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4411 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4412 return unchecked_convert (type, expr, false);
4413 else if (TYPE_BIASED_REPRESENTATION_P (type))
4414 return fold_convert (type,
4415 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4416 convert (TREE_TYPE (type), expr),
6fa4bf38 4417 convert (TREE_TYPE (type),
4418 TYPE_MIN_VALUE (type))));
27becfc8 4419
4420 /* ... fall through ... */
4421
4422 case ENUMERAL_TYPE:
69c2baa9 4423 case BOOLEAN_TYPE:
27becfc8 4424 /* If we are converting an additive expression to an integer type
4425 with lower precision, be wary of the optimization that can be
4426 applied by convert_to_integer. There are 2 problematic cases:
4427 - if the first operand was originally of a biased type,
4428 because we could be recursively called to convert it
4429 to an intermediate type and thus rematerialize the
4430 additive operator endlessly,
4431 - if the expression contains a placeholder, because an
4432 intermediate conversion that changes the sign could
4433 be inserted and thus introduce an artificial overflow
4434 at compile time when the placeholder is substituted. */
4435 if (code == INTEGER_TYPE
4436 && ecode == INTEGER_TYPE
4437 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4438 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4439 {
4440 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4441
4442 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4443 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4444 || CONTAINS_PLACEHOLDER_P (expr))
4445 return build1 (NOP_EXPR, type, expr);
4446 }
4447
4448 return fold (convert_to_integer (type, expr));
4449
4450 case POINTER_TYPE:
4451 case REFERENCE_TYPE:
7b4b0e11 4452 /* If converting between two thin pointers, adjust if needed to account
dd0cd1e4 4453 for differing offsets from the base pointer, depending on whether
4454 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
a98f6bec 4455 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
27becfc8 4456 {
dd0cd1e4 4457 tree etype_pos
4458 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4459 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4460 : size_zero_node;
4461 tree type_pos
4462 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4463 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4464 : size_zero_node;
4465 tree byte_diff = size_diffop (type_pos, etype_pos);
7b4b0e11 4466
27becfc8 4467 expr = build1 (NOP_EXPR, type, expr);
27becfc8 4468 if (integer_zerop (byte_diff))
4469 return expr;
4470
4471 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
7b4b0e11 4472 fold_convert (sizetype, byte_diff));
27becfc8 4473 }
4474
7b4b0e11 4475 /* If converting fat pointer to normal or thin pointer, get the pointer
4476 to the array and then convert it. */
4477 if (TYPE_IS_FAT_POINTER_P (etype))
22582d86 4478 expr
4479 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
27becfc8 4480
4481 return fold (convert_to_pointer (type, expr));
4482
4483 case REAL_TYPE:
4484 return fold (convert_to_real (type, expr));
4485
4486 case RECORD_TYPE:
4487 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
5cd86571 4488 {
f1f41a6c 4489 vec<constructor_elt, va_gc> *v;
4490 vec_alloc (v, 1);
5cd86571 4491
4492 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4493 convert (TREE_TYPE (TYPE_FIELDS (type)),
4494 expr));
4495 return gnat_build_constructor (type, v);
4496 }
27becfc8 4497
4498 /* ... fall through ... */
4499
4500 case ARRAY_TYPE:
4501 /* In these cases, assume the front-end has validated the conversion.
4502 If the conversion is valid, it will be a bit-wise conversion, so
4503 it can be viewed as an unchecked conversion. */
4504 return unchecked_convert (type, expr, false);
4505
4506 case UNION_TYPE:
 4507 /* This is either a conversion between a tagged type and some
4508 subtype, which we have to mark as a UNION_TYPE because of
4509 overlapping fields or a conversion of an Unchecked_Union. */
4510 return unchecked_convert (type, expr, false);
4511
4512 case UNCONSTRAINED_ARRAY_TYPE:
52dd2567 4513 /* If the input is a VECTOR_TYPE, convert to the representative
4514 array type first. */
4515 if (ecode == VECTOR_TYPE)
4516 {
4517 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4518 etype = TREE_TYPE (expr);
4519 ecode = TREE_CODE (etype);
4520 }
4521
27becfc8 4522 /* If EXPR is a constrained array, take its address, convert it to a
4523 fat pointer, and then dereference it. Likewise if EXPR is a
4524 record containing both a template and a constrained array.
4525 Note that a record representing a justified modular type
4526 always represents a packed constrained array. */
4527 if (ecode == ARRAY_TYPE
4528 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4529 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4530 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4531 return
4532 build_unary_op
4533 (INDIRECT_REF, NULL_TREE,
4534 convert_to_fat_pointer (TREE_TYPE (type),
4535 build_unary_op (ADDR_EXPR,
4536 NULL_TREE, expr)));
4537
4538 /* Do something very similar for converting one unconstrained
4539 array to another. */
4540 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4541 return
4542 build_unary_op (INDIRECT_REF, NULL_TREE,
4543 convert (TREE_TYPE (type),
4544 build_unary_op (ADDR_EXPR,
4545 NULL_TREE, expr)));
4546 else
4547 gcc_unreachable ();
4548
4549 case COMPLEX_TYPE:
4550 return fold (convert_to_complex (type, expr));
4551
4552 default:
4553 gcc_unreachable ();
4554 }
4555}
1d957068 4556
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  /* Otherwise, distribute the conversion recursively over the expression:
     each case below either rebuilds the node in sizetype or bails out to
     the plain conversion at the bottom of the function.  */
  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Rebuild the conversion node (CODE is the original conversion code)
	 in sizetype around the recursively converted operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion over both operands of the arithmetics.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand needs converting; the side effects in the
	 first operand are kept as-is.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition keeps its own type.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: plain conversion of EXPR (possibly replaced by its special
     induction variable above) to sizetype.  */
  return convert (sizetype, expr);
}
27becfc8 4642\f
4643/* Remove all conversions that are done in EXP. This includes converting
4644 from a padded type or to a justified modular type. If TRUE_ADDRESS
4645 is true, always return the address of the containing object even if
4646 the address is not bit-aligned. */
4647
4648tree
4649remove_conversions (tree exp, bool true_address)
4650{
4651 switch (TREE_CODE (exp))
4652 {
4653 case CONSTRUCTOR:
4654 if (true_address
4655 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4656 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4657 return
cdbc31ab 4658 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
27becfc8 4659 break;
4660
4661 case COMPONENT_REF:
a98f6bec 4662 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
27becfc8 4663 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4664 break;
4665
27becfc8 4666 CASE_CONVERT:
7ad4f11d 4667 case VIEW_CONVERT_EXPR:
4668 case NON_LVALUE_EXPR:
27becfc8 4669 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4670
4671 default:
4672 break;
4673 }
4674
4675 return exp;
4676}
4677\f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save these flags now: EXP is about to be replaced by a
	     dereference built from its fat-pointer operand and we want
	     the new reference to carry them too.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Operand 0 is the fat pointer; TYPE_FIELDS of its type is the
	     field holding the pointer to the actual array data.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference over both arms of the COND_EXPR
		 rather than dereferencing the COND_EXPR itself.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Straight case: dereference the array-pointer field and
		 restore the flags saved above on the result.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR in the type of the underlying array,
	   i.e. the target type of the array-pointer field.  */
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* The array is the second field, after the template; note that
	     build_simple_component_ref may return NULL_TREE here.  */
	  exp = build_simple_component_ref (exp, NULL_TREE,
					    DECL_CHAIN (TYPE_FIELDS (type)),
					    false);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (exp && TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	}
      break;

    default:
      break;
    }

  /* For any other type, return EXP unchanged.  */
  return exp;
}
4764\f
4d00c918 4765/* Return true if EXPR is an expression that can be folded as an operand
a9538d68 4766 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4d00c918 4767
4768static bool
4769can_fold_for_view_convert_p (tree expr)
4770{
4771 tree t1, t2;
4772
4773 /* The folder will fold NOP_EXPRs between integral types with the same
4774 precision (in the middle-end's sense). We cannot allow it if the
4775 types don't have the same precision in the Ada sense as well. */
4776 if (TREE_CODE (expr) != NOP_EXPR)
4777 return true;
4778
4779 t1 = TREE_TYPE (expr);
4780 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4781
4782 /* Defer to the folder for non-integral conversions. */
4783 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4784 return true;
4785
4786 /* Only fold conversions that preserve both precisions. */
4787 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4788 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4789 return true;
4790
4791 return false;
4792}
4793
27becfc8 4794/* Return an expression that does an unchecked conversion of EXPR to TYPE.
4d00c918 4795 If NOTRUNC_P is true, truncation operations should be suppressed.
4796
4797 Special care is required with (source or target) integral types whose
4798 precision is not equal to their size, to make sure we fetch or assign
4799 the value bits whose location might depend on the endianness, e.g.
4800
4801 Rmsize : constant := 8;
4802 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4803
4804 type Bit_Array is array (1 .. Rmsize) of Boolean;
4805 pragma Pack (Bit_Array);
4806
4807 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4808
4809 Value : Int := 2#1000_0001#;
4810 Vbits : Bit_Array := To_Bit_Array (Value);
4811
4812 we expect the 8 bits at Vbits'Address to always contain Value, while
4813 their original location depends on the endianness, at Value'Address
a9538d68 4814 on a little-endian architecture but not on a big-endian one. */
27becfc8 4815
4816tree
4817unchecked_convert (tree type, tree expr, bool notrunc_p)
4818{
4819 tree etype = TREE_TYPE (expr);
2b161576 4820 enum tree_code ecode = TREE_CODE (etype);
4821 enum tree_code code = TREE_CODE (type);
fc07fe6f 4822 tree tem;
2ca11b4d 4823 int c;
27becfc8 4824
2b161576 4825 /* If the expression is already of the right type, we are done. */
27becfc8 4826 if (etype == type)
4827 return expr;
4828
 4829 /* If both types are integral, just do a normal conversion.
4830 Likewise for a conversion to an unconstrained array. */
6fa4bf38 4831 if (((INTEGRAL_TYPE_P (type)
7b4b0e11 4832 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
2b161576 4833 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
6fa4bf38 4834 && (INTEGRAL_TYPE_P (etype)
a98f6bec 4835 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
2b161576 4836 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4837 || code == UNCONSTRAINED_ARRAY_TYPE)
27becfc8 4838 {
2b161576 4839 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
27becfc8 4840 {
4841 tree ntype = copy_type (etype);
27becfc8 4842 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4843 TYPE_MAIN_VARIANT (ntype) = ntype;
4844 expr = build1 (NOP_EXPR, ntype, expr);
4845 }
4846
2b161576 4847 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
27becfc8 4848 {
4d00c918 4849 tree rtype = copy_type (type);
27becfc8 4850 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4851 TYPE_MAIN_VARIANT (rtype) = rtype;
4d00c918 4852 expr = convert (rtype, expr);
4853 expr = build1 (NOP_EXPR, type, expr);
27becfc8 4854 }
4d00c918 4855 else
4856 expr = convert (type, expr);
27becfc8 4857 }
4858
4d00c918 4859 /* If we are converting to an integral type whose precision is not equal
17ac487b 4860 to its size, first unchecked convert to a record type that contains a
4861 field of the given precision. Then extract the field. */
2ca11b4d 4862 else if (INTEGRAL_TYPE_P (type)
4863 && TYPE_RM_SIZE (type)
27becfc8 4864 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4865 GET_MODE_BITSIZE (TYPE_MODE (type))))
4866 {
4867 tree rec_type = make_node (RECORD_TYPE);
f9ae6f95 4868 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
17ac487b 4869 tree field_type, field;
4870
4871 if (TYPE_UNSIGNED (type))
4872 field_type = make_unsigned_type (prec);
4873 else
4874 field_type = make_signed_type (prec);
4875 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4876
4877 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
62092ba6 4878 NULL_TREE, bitsize_zero_node, 1, 0);
27becfc8 4879
62092ba6 4880 finish_record_type (rec_type, field, 1, false);
27becfc8 4881
4882 expr = unchecked_convert (rec_type, expr, notrunc_p);
c92ff1b0 4883 expr = build_component_ref (expr, NULL_TREE, field, false);
17ac487b 4884 expr = fold_build1 (NOP_EXPR, type, expr);
27becfc8 4885 }
4886
17ac487b 4887 /* Similarly if we are converting from an integral type whose precision is
4888 not equal to its size, first copy into a field of the given precision
4889 and unchecked convert the record type. */
2ca11b4d 4890 else if (INTEGRAL_TYPE_P (etype)
4891 && TYPE_RM_SIZE (etype)
4892 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4893 GET_MODE_BITSIZE (TYPE_MODE (etype))))
27becfc8 4894 {
4895 tree rec_type = make_node (RECORD_TYPE);
f9ae6f95 4896 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
f1f41a6c 4897 vec<constructor_elt, va_gc> *v;
4898 vec_alloc (v, 1);
17ac487b 4899 tree field_type, field;
4900
4901 if (TYPE_UNSIGNED (etype))
4902 field_type = make_unsigned_type (prec);
4903 else
4904 field_type = make_signed_type (prec);
4905 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4906
4907 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
62092ba6 4908 NULL_TREE, bitsize_zero_node, 1, 0);
27becfc8 4909
62092ba6 4910 finish_record_type (rec_type, field, 1, false);
27becfc8 4911
17ac487b 4912 expr = fold_build1 (NOP_EXPR, field_type, expr);
5cd86571 4913 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4914 expr = gnat_build_constructor (rec_type, v);
27becfc8 4915 expr = unchecked_convert (type, expr, notrunc_p);
4916 }
4917
2ca11b4d 4918 /* If we are converting from a scalar type to a type with a different size,
4919 we need to pad to have the same size on both sides.
4920
4921 ??? We cannot do it unconditionally because unchecked conversions are
4922 used liberally by the front-end to implement polymorphism, e.g. in:
4923
4924 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4925 return p___size__4 (p__object!(S191s.all));
4926
4927 so we skip all expressions that are references. */
4928 else if (!REFERENCE_CLASS_P (expr)
4929 && !AGGREGATE_TYPE_P (etype)
4930 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4931 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4932 {
4933 if (c < 0)
4934 {
4935 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4936 false, false, false, true),
4937 expr);
4938 expr = unchecked_convert (type, expr, notrunc_p);
4939 }
4940 else
4941 {
4942 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4943 false, false, false, true);
4944 expr = unchecked_convert (rec_type, expr, notrunc_p);
4945 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4946 false);
4947 }
4948 }
4949
52dd2567 4950 /* We have a special case when we are converting between two unconstrained
4951 array types. In that case, take the address, convert the fat pointer
4952 types, and dereference. */
2b161576 4953 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
27becfc8 4954 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4955 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4956 build_unary_op (ADDR_EXPR, NULL_TREE,
4957 expr)));
52dd2567 4958
4959 /* Another special case is when we are converting to a vector type from its
 4960 representative array type; this is a regular conversion. */
2b161576 4961 else if (code == VECTOR_TYPE
4962 && ecode == ARRAY_TYPE
52dd2567 4963 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4964 etype))
4965 expr = convert (type, expr);
4966
fc07fe6f 4967 /* And, if the array type is not the representative, we try to build an
4968 intermediate vector type of which the array type is the representative
4969 and to do the unchecked conversion between the vector types, in order
4970 to enable further simplifications in the middle-end. */
4971 else if (code == VECTOR_TYPE
4972 && ecode == ARRAY_TYPE
4973 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4974 {
4975 expr = convert (tem, expr);
4976 return unchecked_convert (type, expr, notrunc_p);
4977 }
4978
62092ba6 4979 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4980 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4981 else if (TREE_CODE (expr) == CONSTRUCTOR
4982 && code == RECORD_TYPE
4983 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4984 {
4985 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4986 Empty, false, false, false, true),
4987 expr);
4988 return unchecked_convert (type, expr, notrunc_p);
4989 }
4990
4991 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
27becfc8 4992 else
4993 {
4994 expr = maybe_unconstrained_array (expr);
4995 etype = TREE_TYPE (expr);
2b161576 4996 ecode = TREE_CODE (etype);
4d00c918 4997 if (can_fold_for_view_convert_p (expr))
4998 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4999 else
5000 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
27becfc8 5001 }
5002
4d00c918 5003 /* If the result is an integral type whose precision is not equal to its
5004 size, sign- or zero-extend the result. We need not do this if the input
5005 is an integral type of the same precision and signedness or if the output
27becfc8 5006 is a biased type or if both the input and output are unsigned. */
5007 if (!notrunc_p
5008 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
2b161576 5009 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
27becfc8 5010 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5011 GET_MODE_BITSIZE (TYPE_MODE (type)))
5012 && !(INTEGRAL_TYPE_P (etype)
5013 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5014 && operand_equal_p (TYPE_RM_SIZE (type),
5015 (TYPE_RM_SIZE (etype) != 0
5016 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5017 0))
5018 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5019 {
2b161576 5020 tree base_type
5021 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
27becfc8 5022 tree shift_expr
5023 = convert (base_type,
5024 size_binop (MINUS_EXPR,
5025 bitsize_int
5026 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5027 TYPE_RM_SIZE (type)));
5028 expr
5029 = convert (type,
5030 build_binary_op (RSHIFT_EXPR, base_type,
5031 build_binary_op (LSHIFT_EXPR, base_type,
5032 convert (base_type, expr),
5033 shift_expr),
5034 shift_expr));
5035 }
5036
5037 /* An unchecked conversion should never raise Constraint_Error. The code
5038 below assumes that GCC's conversion routines overflow the same way that
5039 the underlying hardware does. This is probably true. In the rare case
5040 when it is false, we can rely on the fact that such conversions are
5041 erroneous anyway. */
5042 if (TREE_CODE (expr) == INTEGER_CST)
5043 TREE_OVERFLOW (expr) = 0;
5044
5045 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5046 show no longer constant. */
5047 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5048 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5049 OEP_ONLY_CONST))
5050 TREE_CONSTANT (expr) = 0;
5051
5052 return expr;
5053}
5054\f
4880a940 5055/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
27becfc8 5056 the latter being a record type as predicated by Is_Record_Type. */
5057
5058enum tree_code
5059tree_code_for_record_type (Entity_Id gnat_type)
5060{
68e668ce 5061 Node_Id component_list, component;
27becfc8 5062
68e668ce 5063 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5064 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
27becfc8 5065 if (!Is_Unchecked_Union (gnat_type))
5066 return RECORD_TYPE;
5067
68e668ce 5068 gnat_type = Implementation_Base_Type (gnat_type);
5069 component_list
5070 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5071
27becfc8 5072 for (component = First_Non_Pragma (Component_Items (component_list));
5073 Present (component);
5074 component = Next_Non_Pragma (component))
5075 if (Ekind (Defining_Entity (component)) == E_Component)
5076 return RECORD_TYPE;
5077
5078 return UNION_TYPE;
5079}
5080
d4b7e0f5 5081/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5082 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5083 according to the presence of an alignment clause on the type or, if it
5084 is an array, on the component type. */
5085
5086bool
5087is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5088{
5089 gnat_type = Underlying_Type (gnat_type);
5090
5091 *align_clause = Present (Alignment_Clause (gnat_type));
5092
5093 if (Is_Array_Type (gnat_type))
5094 {
5095 gnat_type = Underlying_Type (Component_Type (gnat_type));
5096 if (Present (Alignment_Clause (gnat_type)))
5097 *align_clause = true;
5098 }
5099
5100 if (!Is_Floating_Point_Type (gnat_type))
5101 return false;
5102
5103 if (UI_To_Int (Esize (gnat_type)) != 64)
5104 return false;
5105
5106 return true;
5107}
5108
5109/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5110 size is greater or equal to 64 bits, or an array of such a type. Set
5111 ALIGN_CLAUSE according to the presence of an alignment clause on the
5112 type or, if it is an array, on the component type. */
5113
5114bool
5115is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5116{
5117 gnat_type = Underlying_Type (gnat_type);
5118
5119 *align_clause = Present (Alignment_Clause (gnat_type));
5120
5121 if (Is_Array_Type (gnat_type))
5122 {
5123 gnat_type = Underlying_Type (Component_Type (gnat_type));
5124 if (Present (Alignment_Clause (gnat_type)))
5125 *align_clause = true;
5126 }
5127
5128 if (!Is_Scalar_Type (gnat_type))
5129 return false;
5130
5131 if (UI_To_Int (Esize (gnat_type)) < 64)
5132 return false;
5133
5134 return true;
5135}
5136
27becfc8 5137/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5138 component of an aggregate type. */
5139
5140bool
5141type_for_nonaliased_component_p (tree gnu_type)
5142{
5143 /* If the type is passed by reference, we may have pointers to the
5144 component so it cannot be made non-aliased. */
5145 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5146 return false;
5147
5148 /* We used to say that any component of aggregate type is aliased
5149 because the front-end may take 'Reference of it. The front-end
5150 has been enhanced in the meantime so as to use a renaming instead
5151 in most cases, but the back-end can probably take the address of
5152 such a component too so we go for the conservative stance.
5153
5154 For instance, we might need the address of any array type, even
5155 if normally passed by copy, to construct a fat pointer if the
5156 component is used as an actual for an unconstrained formal.
5157
5158 Likewise for record types: even if a specific record subtype is
5159 passed by copy, the parent type might be passed by ref (e.g. if
5160 it's of variable size) and we might take the address of a child
5161 component to pass to a parent formal. We have no way to check
5162 for such conditions here. */
5163 if (AGGREGATE_TYPE_P (gnu_type))
5164 return false;
5165
5166 return true;
5167}
5168
a88c8608 5169/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5170
5171bool
5172smaller_form_type_p (tree type, tree orig_type)
5173{
5174 tree size, osize;
5175
5176 /* We're not interested in variants here. */
5177 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5178 return false;
5179
5180 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5181 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5182 return false;
5183
5184 size = TYPE_SIZE (type);
5185 osize = TYPE_SIZE (orig_type);
5186
5187 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5188 return false;
5189
5190 return tree_int_cst_lt (size, osize) != 0;
5191}
5192
0c6fd2e5 5193/* Perform final processing on global declarations. */
3a1c9df2 5194
2df77065 5195static GTY (()) tree dummy_global;
5196
27becfc8 5197void
0c6fd2e5 5198gnat_write_global_declarations (void)
27becfc8 5199{
60388043 5200 unsigned int i;
5201 tree iter;
5202
86bfd6f3 5203 /* If we have declared types as used at the global level, insert them in
0617d4e0 5204 the global hash table. We use a dummy variable for this purpose, but
5205 we need to build it unconditionally to avoid -fcompare-debug issues. */
5206 if (first_global_object_name)
86bfd6f3 5207 {
a2862f76 5208 struct varpool_node *node;
a7688838 5209 char *label;
5210
5211 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
86bfd6f3 5212 dummy_global
a7688838 5213 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5214 void_type_node);
861daf9d 5215 DECL_HARD_REGISTER (dummy_global) = 1;
86bfd6f3 5216 TREE_STATIC (dummy_global) = 1;
727d4825 5217 node = varpool_node::get_create (dummy_global);
861daf9d 5218 node->definition = 1;
02774f2d 5219 node->force_output = 1;
86bfd6f3 5220
0617d4e0 5221 if (types_used_by_cur_var_decl)
5222 while (!types_used_by_cur_var_decl->is_empty ())
5223 {
5224 tree t = types_used_by_cur_var_decl->pop ();
5225 types_used_by_var_decl_insert (t, dummy_global);
5226 }
86bfd6f3 5227 }
5228
0c6fd2e5 5229 /* Output debug information for all global type declarations first. This
5230 ensures that global types whose compilation hasn't been finalized yet,
5231 for example pointers to Taft amendment types, have their compilation
5232 finalized in the right context. */
5233 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5234 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
a11ea431 5235 debug_hooks->type_decl (iter, false);
0c6fd2e5 5236
5237 /* Then output the global variables. We need to do that after the debug
0094ca3a 5238 information for global types is emitted so that they are finalized. */
0c6fd2e5 5239 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5240 if (TREE_CODE (iter) == VAR_DECL)
5241 rest_of_decl_compilation (iter, true, 0);
27becfc8 5242}
5243
5244/* ************************************************************************
5245 * * GCC builtins support *
5246 * ************************************************************************ */
5247
5248/* The general scheme is fairly simple:
5249
5250 For each builtin function/type to be declared, gnat_install_builtins calls
a10d3a24 5251 internal facilities which eventually get to gnat_pushdecl, which in turn
27becfc8 5252 tracks the so declared builtin function decls in the 'builtin_decls' global
5253 datastructure. When an Intrinsic subprogram declaration is processed, we
5254 search this global datastructure to retrieve the associated BUILT_IN DECL
5255 node. */
5256
5257/* Search the chain of currently available builtin declarations for a node
5258 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5259 found, if any, or NULL_TREE otherwise. */
5260tree
5261builtin_decl_for (tree name)
5262{
5263 unsigned i;
5264 tree decl;
5265
f1f41a6c 5266 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
27becfc8 5267 if (DECL_NAME (decl) == name)
5268 return decl;
5269
5270 return NULL_TREE;
5271}
5272
5273/* The code below eventually exposes gnat_install_builtins, which declares
5274 the builtin types and functions we might need, either internally or as
5275 user accessible facilities.
5276
5277 ??? This is a first implementation shot, still in rough shape. It is
5278 heavily inspired from the "C" family implementation, with chunks copied
5279 verbatim from there.
5280
5281 Two obvious TODO candidates are
5282 o Use a more efficient name/decl mapping scheme
5283 o Devise a middle-end infrastructure to avoid having to copy
5284 pieces between front-ends. */
5285
5286/* ----------------------------------------------------------------------- *
5287 * BUILTIN ELEMENTARY TYPES *
5288 * ----------------------------------------------------------------------- */
5289
5290/* Standard data types to be used in builtin argument declarations. */
5291
/* Indices into c_global_trees for the standard data types used in builtin
   argument declarations.  */
enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin types, which in turn inhibits support for a
   number of builtin functions.  Map them to void_type_node as a stopgap.  */
#define wint_type_node void_type_node
#define intmax_type_node void_type_node
#define uintmax_type_node void_type_node
5313
5314/* Build the void_list_node (void_type_node having been created). */
5315
5316static tree
5317build_void_list_node (void)
5318{
5319 tree t = build_tree_list (NULL_TREE, void_type_node);
5320 return t;
5321}
5322
5323/* Used to help initialize the builtin-types.def table. When a type of
5324 the correct size doesn't exist, use error_mark_node instead of NULL.
5325 The later results in segfaults even when a decl using the type doesn't
5326 get invoked. */
5327
5328static tree
5329builtin_type_for_size (int size, bool unsignedp)
5330{
9bfb1138 5331 tree type = gnat_type_for_size (size, unsignedp);
27becfc8 5332 return type ? type : error_mark_node;
5333}
5334
5335/* Build/push the elementary type decls that builtin functions/types
5336 will need. */
5337
/* Build/push the elementary type decls that builtin functions/types
   will need: the signed counterpart of size_t, a pid type, the void
   terminator list, and the (const) string pointer types.  */

static void
install_builtin_elementary_types (void)
{
  signed_size_type_node = gnat_signed_type (size_type_node);
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *" for string-taking builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
5350
5351/* ----------------------------------------------------------------------- *
5352 * BUILTIN FUNCTION TYPES *
5353 * ----------------------------------------------------------------------- */
5354
5355/* Now, builtin function types per se. */
5356
/* Enumerate every builtin function type declared in builtin-types.def by
   expanding each DEF_* macro to just the enumerator NAME; the matching
   expansions in install_builtin_function_types below then fill in the
   actual type nodes at the same indices.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
				NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_FUNCTION_TYPE_VAR_11
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5411
5412/* A helper function for install_builtin_types. Build function type
5413 for DEF with return type RET and N arguments. If VAR is true, then the
5414 function should be variadic after those N arguments.
5415
5416 Takes special care not to ICE if any of the types involved are
5417 error_mark_node, which indicates that said type is not in fact available
5418 (see builtin_type_for_size). In which case the function type as a whole
5419 should be error_mark_node. */
5420
/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  /* Collect the N argument types; bail out with error_mark_node as soon
     as any of them is unavailable.  */
  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
	goto egress;
      args[i] = t;
    }

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  /* On the error path T still holds error_mark_node, which poisons the
     whole function type as documented above.  */
  builtin_types[def] = t;
  va_end (list);
}
5451
5452/* Build the builtin function types and install them in the builtin_types
5453 array for later use in builtin function decls. */
5454
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  Each DEF_* macro below
   expands the corresponding entry of builtin-types.def into either a
   direct table store (primitive and pointer types) or a def_fn_type call
   (function types), mirroring the enumerators built above.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* An array va_list decays to a pointer when passed, so both the argument
     and reference forms are a pointer to the element type; otherwise the
     argument form is va_list itself and the reference form a reference.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) \
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
  def_fn_type (ENUM, RETURN, 1, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_FUNCTION_TYPE_VAR_11
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
5541
5542/* ----------------------------------------------------------------------- *
5543 * BUILTIN ATTRIBUTES *
5544 * ----------------------------------------------------------------------- */
5545
/* Enumerate every attribute node declared in builtin-attrs.def by expanding
   each DEF_ATTR_* macro to just the enumerator; install_builtin_attributes
   below fills in the matching tree nodes at the same indices.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5563
/* Populate the built_in_attributes array from builtin-attrs.def: integer
   and string constants, identifiers, and TREE_LIST chains built from
   previously-filled entries (the .def file lists dependencies first).  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5588
5589/* Handle a "const" attribute; arguments as in
5590 struct attribute_spec.handler. */
5591
5592static tree
5593handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5594 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5595 bool *no_add_attrs)
5596{
5597 if (TREE_CODE (*node) == FUNCTION_DECL)
5598 TREE_READONLY (*node) = 1;
5599 else
5600 *no_add_attrs = true;
5601
5602 return NULL_TREE;
5603}
5604
5605/* Handle a "nothrow" attribute; arguments as in
5606 struct attribute_spec.handler. */
5607
5608static tree
5609handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5610 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5611 bool *no_add_attrs)
5612{
5613 if (TREE_CODE (*node) == FUNCTION_DECL)
5614 TREE_NOTHROW (*node) = 1;
5615 else
5616 *no_add_attrs = true;
5617
5618 return NULL_TREE;
5619}
5620
5621/* Handle a "pure" attribute; arguments as in
5622 struct attribute_spec.handler. */
5623
5624static tree
5625handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5626 int ARG_UNUSED (flags), bool *no_add_attrs)
5627{
5628 if (TREE_CODE (*node) == FUNCTION_DECL)
5629 DECL_PURE_P (*node) = 1;
5630 /* ??? TODO: Support types. */
5631 else
5632 {
52dd2567 5633 warning (OPT_Wattributes, "%qs attribute ignored",
5634 IDENTIFIER_POINTER (name));
27becfc8 5635 *no_add_attrs = true;
5636 }
5637
5638 return NULL_TREE;
5639}
5640
5641/* Handle a "no vops" attribute; arguments as in
5642 struct attribute_spec.handler. */
5643
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  This attribute is internal-only, so a
   non-function target is a compiler bug rather than a user error.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5653
5654/* Helper for nonnull attribute handling; fetch the operand number
5655 from the attribute argument list. */
5656
5657static bool
5658get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5659{
5660 /* Verify the arg number is a constant. */
e1d65c9f 5661 if (!tree_fits_uhwi_p (arg_num_expr))
27becfc8 5662 return false;
5663
f9ae6f95 5664 *valp = TREE_INT_CST_LOW (arg_num_expr);
27becfc8 5665 return true;
5666}
5667
5668/* Handle the "nonnull" attribute. */
5669static tree
5670handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5671 tree args, int ARG_UNUSED (flags),
5672 bool *no_add_attrs)
5673{
5674 tree type = *node;
5675 unsigned HOST_WIDE_INT attr_arg_num;
5676
5677 /* If no arguments are specified, all pointer arguments should be
5678 non-null. Verify a full prototype is given so that the arguments
5679 will have the correct types when we actually check them later. */
5680 if (!args)
5681 {
a36cf284 5682 if (!prototype_p (type))
27becfc8 5683 {
5684 error ("nonnull attribute without arguments on a non-prototype");
5685 *no_add_attrs = true;
5686 }
5687 return NULL_TREE;
5688 }
5689
5690 /* Argument list specified. Verify that each argument number references
5691 a pointer argument. */
5692 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5693 {
27becfc8 5694 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5695
5696 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5697 {
5698 error ("nonnull argument has invalid operand number (argument %lu)",
5699 (unsigned long) attr_arg_num);
5700 *no_add_attrs = true;
5701 return NULL_TREE;
5702 }
5703
3e33edbe 5704 if (prototype_p (type))
27becfc8 5705 {
3e33edbe 5706 function_args_iterator iter;
5707 tree argument;
5708
5709 function_args_iter_init (&iter, type);
5710 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
27becfc8 5711 {
3e33edbe 5712 argument = function_args_iter_cond (&iter);
27becfc8 5713 if (!argument || ck_num == arg_num)
5714 break;
27becfc8 5715 }
5716
5717 if (!argument
3e33edbe 5718 || TREE_CODE (argument) == VOID_TYPE)
27becfc8 5719 {
ac45dde2 5720 error ("nonnull argument with out-of-range operand number "
5721 "(argument %lu, operand %lu)",
27becfc8 5722 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5723 *no_add_attrs = true;
5724 return NULL_TREE;
5725 }
5726
3e33edbe 5727 if (TREE_CODE (argument) != POINTER_TYPE)
27becfc8 5728 {
ac45dde2 5729 error ("nonnull argument references non-pointer operand "
5730 "(argument %lu, operand %lu)",
27becfc8 5731 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5732 *no_add_attrs = true;
5733 return NULL_TREE;
5734 }
5735 }
5736 }
5737
5738 return NULL_TREE;
5739}
5740
5741/* Handle a "sentinel" attribute. */
5742
/* Handle a "sentinel" attribute: only valid on prototyped variadic
   functions; the optional argument is the non-negative position of the
   sentinel counted from the end of the argument list.  Arguments as in
   struct attribute_spec.handler.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
	{
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  /* Validate the optional position argument, if given.  */
  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	{
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
	}
      else
	{
	  if (tree_int_cst_lt (position, integer_zero_node))
	    {
	      warning (0, "requested position is less than zero");
	      *no_add_attrs = true;
	    }
	}
    }

  return NULL_TREE;
}
5786
5787/* Handle a "noreturn" attribute; arguments as in
5788 struct attribute_spec.handler. */
5789
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  For a function decl the property is
   recorded with TREE_THIS_VOLATILE; for a pointer-to-function type a
   volatile-qualified variant of the pointed-to type is rebuilt.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* Rebuild the pointer type with a volatile function target,
       preserving any const qualification.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5814
4a68fd3c 5815/* Handle a "leaf" attribute; arguments as in
5816 struct attribute_spec.handler. */
5817
static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* "leaf" only makes sense on function declarations.  */
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  /* NOTE(review): this is deliberately not an "else if" — a non-function
     that is also non-public gets both warnings, matching the C family
     handler's behavior.  "leaf" has no effect on local functions since
     they cannot be called back from another translation unit.  */
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5835
5836/* Handle a "always_inline" attribute; arguments as in
5837 struct attribute_spec.handler. */
5838
5839static tree
5840handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5841 int ARG_UNUSED (flags), bool *no_add_attrs)
5842{
5843 if (TREE_CODE (*node) == FUNCTION_DECL)
5844 {
5845 /* Set the attribute and mark it for disregarding inline limits. */
5846 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5847 }
5848 else
5849 {
5850 warning (OPT_Wattributes, "%qE attribute ignored", name);
5851 *no_add_attrs = true;
4a68fd3c 5852 }
5853
5854 return NULL_TREE;
5855}
5856
27becfc8 5857/* Handle a "malloc" attribute; arguments as in
5858 struct attribute_spec.handler. */
5859
5860static tree
5861handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5862 int ARG_UNUSED (flags), bool *no_add_attrs)
5863{
5864 if (TREE_CODE (*node) == FUNCTION_DECL
5865 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5866 DECL_IS_MALLOC (*node) = 1;
5867 else
5868 {
52dd2567 5869 warning (OPT_Wattributes, "%qs attribute ignored",
5870 IDENTIFIER_POINTER (name));
27becfc8 5871 *no_add_attrs = true;
5872 }
5873
5874 return NULL_TREE;
5875}
5876
5877/* Fake handler for attributes we don't properly support. */
5878
/* Fake handler for attributes we don't properly support: accepts the
   attribute without recording anything, so the attribute is silently
   carried in the attribute list with no effect.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
5888
5889/* Handle a "type_generic" attribute. */
5890
/* Handle a "type_generic" attribute.  This attribute is internal-only,
   so violations are compiler bugs rather than user errors.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
5904
5595eac6 5905/* Handle a "vector_size" attribute; arguments as in
5906 struct attribute_spec.handler. */
5907
5908static tree
5909handle_vector_size_attribute (tree *node, tree name, tree args,
fc07fe6f 5910 int ARG_UNUSED (flags), bool *no_add_attrs)
5595eac6 5911{
fc07fe6f 5912 tree type = *node;
5913 tree vector_type;
5595eac6 5914
5915 *no_add_attrs = true;
5916
5595eac6 5917 /* We need to provide for vector pointers, vector arrays, and
5918 functions returning vectors. For example:
5919
5920 __attribute__((vector_size(16))) short *foo;
5921
5922 In this case, the mode is SI, but the type being modified is
5923 HI, so we need to look further. */
5595eac6 5924 while (POINTER_TYPE_P (type)
5925 || TREE_CODE (type) == FUNCTION_TYPE
e00e17ad 5926 || TREE_CODE (type) == ARRAY_TYPE)
5595eac6 5927 type = TREE_TYPE (type);
5928
fc07fe6f 5929 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
5930 if (!vector_type)
5931 return NULL_TREE;
5595eac6 5932
5933 /* Build back pointers if needed. */
fc07fe6f 5934 *node = reconstruct_complex_type (*node, vector_type);
5595eac6 5935
5936 return NULL_TREE;
5937}
5938
52dd2567 5939/* Handle a "vector_type" attribute; arguments as in
5940 struct attribute_spec.handler. */
5941
5942static tree
5943handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
fc07fe6f 5944 int ARG_UNUSED (flags), bool *no_add_attrs)
52dd2567 5945{
fc07fe6f 5946 tree type = *node;
5947 tree vector_type;
52dd2567 5948
5949 *no_add_attrs = true;
5950
fc07fe6f 5951 if (TREE_CODE (type) != ARRAY_TYPE)
52dd2567 5952 {
5953 error ("attribute %qs applies to array types only",
5954 IDENTIFIER_POINTER (name));
5955 return NULL_TREE;
5956 }
5957
fc07fe6f 5958 vector_type = build_vector_type_for_array (type, name);
5959 if (!vector_type)
52dd2567 5960 return NULL_TREE;
5961
fc07fe6f 5962 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5963 *node = vector_type;
52dd2567 5964
5965 return NULL_TREE;
5966}
5967
27becfc8 5968/* ----------------------------------------------------------------------- *
5969 * BUILTIN FUNCTIONS *
5970 * ----------------------------------------------------------------------- */
5971
5972/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5973 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5974 if nonansi_p and flag_no_nonansi_builtin. */
5975
5976static void
5977def_builtin_1 (enum built_in_function fncode,
5978 const char *name,
5979 enum built_in_class fnclass,
5980 tree fntype, tree libtype,
5981 bool both_p, bool fallback_p,
5982 bool nonansi_p ATTRIBUTE_UNUSED,
5983 tree fnattrs, bool implicit_p)
5984{
5985 tree decl;
5986 const char *libname;
5987
5988 /* Preserve an already installed decl. It most likely was setup in advance
5989 (e.g. as part of the internal builtins) for specific reasons. */
b9a16870 5990 if (builtin_decl_explicit (fncode) != NULL_TREE)
27becfc8 5991 return;
5992
5993 gcc_assert ((!both_p && !fallback_p)
5994 || !strncmp (name, "__builtin_",
5995 strlen ("__builtin_")));
5996
5997 libname = name + strlen ("__builtin_");
5998 decl = add_builtin_function (name, fntype, fncode, fnclass,
5999 (fallback_p ? libname : NULL),
6000 fnattrs);
6001 if (both_p)
6002 /* ??? This is normally further controlled by command-line options
6003 like -fno-builtin, but we don't have them for Ada. */
6004 add_builtin_function (libname, libtype, fncode, fnclass,
6005 NULL, fnattrs);
6006
b9a16870 6007 set_builtin_decl (fncode, decl, implicit_p);
27becfc8 6008}
6009
/* C-dialect gating flags, presumably referenced by COND expressions coming
   from builtins.def (TODO confirm against that file).  Ada has no notion of
   C language standards, so they stay permanently zero here and the guarded
   builtins are simply not installed.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
27becfc8 6013
/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
  /* Expand every DEF_BUILTIN entry of builtins.def into a call to
     def_builtin_1, mapping the TYPE/LIBTYPE/ATTRS indices onto the trees
     held in builtin_types and built_in_attributes.  Entries with a null
     NAME or a false COND are skipped.  */
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6030
6031/* ----------------------------------------------------------------------- *
6032 * BUILTIN FUNCTIONS *
6033 * ----------------------------------------------------------------------- */
6034
/* Install the builtin functions we might need.  The order of the calls
   below matters: types and attributes must exist before the function
   definitions that reference them are processed.  */

void
gnat_install_builtins (void)
{
  /* Build the types, function prototypes and attribute lists that the
     builtin definitions installed below refer to.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6055
6056#include "gt-ada-utils.h"
6057#include "gtype-ada.h"