gcc/ada/gcc-interface/utils.c
27becfc8 1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
16b68428 9 * Copyright (C) 1992-2017, Free Software Foundation, Inc. *
27becfc8 10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
27becfc8 26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
4cba6f60 29#include "target.h"
30#include "function.h"
27becfc8 31#include "tree.h"
9ed99284 32#include "stringpool.h"
4cba6f60 33#include "cgraph.h"
34#include "diagnostic.h"
35#include "alias.h"
36#include "fold-const.h"
9ed99284 37#include "stor-layout.h"
38#include "attribs.h"
39#include "varasm.h"
27becfc8 40#include "toplev.h"
41#include "output.h"
27becfc8 42#include "debug.h"
43#include "convert.h"
218e3e4e 44#include "common/common-target.h"
a9a42d49 45#include "langhooks.h"
a9a42d49 46#include "tree-dump.h"
27becfc8 47#include "tree-inline.h"
27becfc8 48
49#include "ada.h"
50#include "types.h"
51#include "atree.h"
27becfc8 52#include "nlists.h"
27becfc8 53#include "uintp.h"
54#include "fe.h"
55#include "sinfo.h"
56#include "einfo.h"
57#include "ada-tree.h"
58#include "gigi.h"
59
27becfc8 60/* If nonzero, pretend we are allocating at global level. */
61int force_global;
62
d4b7e0f5 63/* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66int double_float_alignment;
67
68/* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71int double_scalar_alignment;
72
b6f6bb02 73/* True if floating-point arithmetic may use wider intermediate results. */
74bool fp_arith_may_widen = true;
75
27becfc8 76/* Tree nodes for the various types and decls we create. */
77tree gnat_std_decls[(int) ADT_LAST];
78
79/* Functions to call for each of the possible raise reasons. */
80tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81
9a710cc4 82/* Likewise, but with extra info for each of the possible raise reasons. */
9f294c82 83tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
84
27becfc8 85/* Forward declarations for handlers of attributes. */
86static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
02e0316b 93static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
4a68fd3c 95static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
9fac98bb 96static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
27becfc8 97static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
5595eac6 99static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
52dd2567 100static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
27becfc8 101
102/* Fake handler for attributes we don't properly support, typically because
 103 they'd require dragging a lot of the common C front-end circuitry. */
104static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
105
106/* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108const struct attribute_spec gnat_internal_attribute_table[] =
109{
ac86af5d 110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
02e0316b 126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
ac86af5d 130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
9fac98bb 132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
ac86af5d 134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
138
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
27becfc8 144
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
ac86af5d 148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
27becfc8 150
ac86af5d 151 { NULL, 0, 0, false, false, false, NULL, false }
27becfc8 152};
153
 154/* Associates a GNAT tree node with a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
158
159#define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
161
162#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
164
165#define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
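
/* For example, save_gnu_tree (gnat_entity, gnu_decl, false) below reduces to
   SET_GNU_TREE (gnat_entity, gnu_decl) after asserting that gnu_decl is a
   ..._DECL node and that no tree is already associated with gnat_entity. */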
167
 168/* Associates a GNAT entity with a GCC tree node used as a dummy, if any. */
169static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
170
171#define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
173
174#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
176
177#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
179
 180/* This variable keeps a table of types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
182
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
187
188/* Likewise for float types, but record these by mode. */
189static GTY(()) tree float_types[NUM_MACHINE_MODES];
190
191/* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
193
fb1e4f4a 194struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
27becfc8 195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
202};
203
204/* The binding level currently in effect. */
205static GTY(()) struct gnat_binding_level *current_binding_level;
206
207/* A chain of gnat_binding_level structures awaiting reuse. */
208static GTY((deletable)) struct gnat_binding_level *free_binding_level;
209
eac916f9 210/* The context to be used for global declarations. */
211static GTY(()) tree global_context;
212
0c6fd2e5 213/* An array of global declarations. */
214static GTY(()) vec<tree, va_gc> *global_decls;
27becfc8 215
216/* An array of builtin function declarations. */
f1f41a6c 217static GTY(()) vec<tree, va_gc> *builtin_decls;
27becfc8 218
27becfc8 219/* A chain of unused BLOCK nodes. */
220static GTY((deletable)) tree free_block_chain;
221
76cb9822 222/* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
d1023d12 224
225struct GTY((for_user)) pad_type_hash {
76cb9822 226 unsigned long hash;
227 tree type;
228};
229
eae1ecb4 230struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
d1023d12 231{
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
99378011 234 static int keep_cache_entry (pad_type_hash *&);
d1023d12 235};
236
237static GTY ((cache))
238 hash_table<pad_type_hasher> *pad_type_hash_table;
76cb9822 239
27becfc8 240static tree merge_sizes (tree, tree, tree, bool, bool);
42a9a9c6 241static tree fold_bit_position (const_tree);
27becfc8 242static tree compute_related_constant (tree, tree);
243static tree split_plus (tree, tree *);
3754d046 244static tree float_type_for_precision (int, machine_mode);
27becfc8 245static tree convert_to_fat_pointer (tree, tree);
819ab09a 246static unsigned int scale_by_factor_of (tree, unsigned int);
27becfc8 247static bool potential_alignment_gap (tree, tree, tree);
810e94f8 248
baaf92dc 249/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
250 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
810e94f8 251struct deferred_decl_context_node
252{
baaf92dc 253 /* The ..._DECL node to work on. */
254 tree decl;
255
256 /* The corresponding entity's Scope. */
257 Entity_Id gnat_scope;
258
259 /* The value of force_global when DECL was pushed. */
260 int force_global;
261
262 /* The list of ..._TYPE nodes to propagate the context to. */
263 vec<tree> types;
264
265 /* The next queue item. */
266 struct deferred_decl_context_node *next;
810e94f8 267};
268
269static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
270
271/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
272 feed it with the elaboration of GNAT_SCOPE. */
273static struct deferred_decl_context_node *
274add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
275
276/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
277 feed it with the DECL_CONTEXT computed as part of N as soon as it is
278 computed. */
279static void add_deferred_type_context (struct deferred_decl_context_node *n,
280 tree type);
27becfc8 281\f
76cb9822 282/* Initialize data structures of the utils.c module. */
27becfc8 283
284void
76cb9822 285init_gnat_utils (void)
27becfc8 286{
76cb9822 287 /* Initialize the association of GNAT nodes to GCC trees. */
25a27413 288 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
76cb9822 289
290 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
25a27413 291 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
76cb9822 292
293 /* Initialize the hash table of padded types. */
d1023d12 294 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
27becfc8 295}
296
76cb9822 297/* Destroy data structures of the utils.c module. */
5154a747 298
299void
76cb9822 300destroy_gnat_utils (void)
5154a747 301{
76cb9822 302 /* Destroy the association of GNAT nodes to GCC trees. */
5154a747 303 ggc_free (associate_gnat_to_gnu);
304 associate_gnat_to_gnu = NULL;
5154a747 305
76cb9822 306 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
307 ggc_free (dummy_node_table);
308 dummy_node_table = NULL;
309
310 /* Destroy the hash table of padded types. */
d1023d12 311 pad_type_hash_table->empty ();
76cb9822 312 pad_type_hash_table = NULL;
76cb9822 313}
314\f
bff37289 315/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
316 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
317 If NO_CHECK is true, the latter check is suppressed.
27becfc8 318
bff37289 319 If GNU_DECL is zero, reset a previous association. */
27becfc8 320
321void
322save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
323{
324 /* Check that GNAT_ENTITY is not already defined and that it is being set
bff37289 325 to something which is a decl. If that is not the case, this usually
27becfc8 326 means GNAT_ENTITY is defined twice, but occasionally is due to some
327 Gigi problem. */
328 gcc_assert (!(gnu_decl
329 && (PRESENT_GNU_TREE (gnat_entity)
330 || (!no_check && !DECL_P (gnu_decl)))));
331
332 SET_GNU_TREE (gnat_entity, gnu_decl);
333}
334
bff37289 335/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
336 that was associated with it. If there is no such tree node, abort.
27becfc8 337
338 In some cases, such as delayed elaboration or expressions that need to
339 be elaborated only once, GNAT_ENTITY is really not an entity. */
340
341tree
342get_gnu_tree (Entity_Id gnat_entity)
343{
344 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
345 return GET_GNU_TREE (gnat_entity);
346}
347
348/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
349
350bool
351present_gnu_tree (Entity_Id gnat_entity)
352{
353 return PRESENT_GNU_TREE (gnat_entity);
354}
355\f
27becfc8 356/* Make a dummy type corresponding to GNAT_TYPE. */
357
358tree
359make_dummy_type (Entity_Id gnat_type)
360{
bcde54d5 361 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
16b68428 362 tree gnu_type, debug_type;
27becfc8 363
27becfc8 364 /* If there was no equivalent type (can only happen when just annotating
365 types) or underlying type, go back to the original type. */
bcde54d5 366 if (No (gnat_equiv))
367 gnat_equiv = gnat_type;
27becfc8 368
 369 /* If there is already a dummy type, use that one. Else make one. */
bcde54d5 370 if (PRESENT_DUMMY_NODE (gnat_equiv))
371 return GET_DUMMY_NODE (gnat_equiv);
27becfc8 372
373 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
374 an ENUMERAL_TYPE. */
bcde54d5 375 gnu_type = make_node (Is_Record_Type (gnat_equiv)
376 ? tree_code_for_record_type (gnat_equiv)
27becfc8 377 : ENUMERAL_TYPE);
378 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
379 TYPE_DUMMY_P (gnu_type) = 1;
515c6c6c 380 TYPE_STUB_DECL (gnu_type)
381 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
bcde54d5 382 if (Is_By_Reference_Type (gnat_equiv))
a3b35344 383 TYPE_BY_REFERENCE_P (gnu_type) = 1;
27becfc8 384
bcde54d5 385 SET_DUMMY_NODE (gnat_equiv, gnu_type);
27becfc8 386
16b68428 387 /* Create a debug type so that debug info consumers only see an unspecified
388 type. */
389 if (Needs_Debug_Info (gnat_type))
390 {
391 debug_type = make_node (LANG_TYPE);
392 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
393
394 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
395 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
396 }
397
27becfc8 398 return gnu_type;
399}
41dd28aa 400
401/* Return the dummy type that was made for GNAT_TYPE, if any. */
402
403tree
404get_dummy_type (Entity_Id gnat_type)
405{
406 return GET_DUMMY_NODE (gnat_type);
407}
408
409/* Build dummy fat and thin pointer types whose designated type is specified
410 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
411
412void
413build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
414{
415 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
416 tree gnu_fat_type, fields, gnu_object_type;
417
418 gnu_template_type = make_node (RECORD_TYPE);
419 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
420 TYPE_DUMMY_P (gnu_template_type) = 1;
421 gnu_ptr_template = build_pointer_type (gnu_template_type);
422
423 gnu_array_type = make_node (ENUMERAL_TYPE);
424 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
425 TYPE_DUMMY_P (gnu_array_type) = 1;
426 gnu_ptr_array = build_pointer_type (gnu_array_type);
427
428 gnu_fat_type = make_node (RECORD_TYPE);
429 /* Build a stub DECL to trigger the special processing for fat pointer types
430 in gnat_pushdecl. */
431 TYPE_NAME (gnu_fat_type)
432 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
433 gnu_fat_type);
434 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
435 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
436 DECL_CHAIN (fields)
437 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
438 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
439 finish_fat_pointer_type (gnu_fat_type, fields);
440 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
441 /* Suppress debug info until after the type is completed. */
442 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
443
444 gnu_object_type = make_node (RECORD_TYPE);
445 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
446 TYPE_DUMMY_P (gnu_object_type) = 1;
447
448 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
6a1231a5 449 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
41dd28aa 450 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
451}
27becfc8 452\f
1d2bb655 453/* Return true if we are in the global binding level. */
27becfc8 454
1d2bb655 455bool
27becfc8 456global_bindings_p (void)
457{
ea780bd9 458 return force_global || !current_function_decl;
27becfc8 459}
460
db72a63f 461/* Enter a new binding level. */
27becfc8 462
463void
c88e6a4f 464gnat_pushlevel (void)
27becfc8 465{
466 struct gnat_binding_level *newlevel = NULL;
467
468 /* Reuse a struct for this binding level, if there is one. */
469 if (free_binding_level)
470 {
471 newlevel = free_binding_level;
472 free_binding_level = free_binding_level->chain;
473 }
474 else
25a27413 475 newlevel = ggc_alloc<gnat_binding_level> ();
27becfc8 476
477 /* Use a free BLOCK, if any; otherwise, allocate one. */
478 if (free_block_chain)
479 {
480 newlevel->block = free_block_chain;
481 free_block_chain = BLOCK_CHAIN (free_block_chain);
482 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
483 }
484 else
485 newlevel->block = make_node (BLOCK);
486
487 /* Point the BLOCK we just made to its parent. */
488 if (current_binding_level)
489 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
490
db72a63f 491 BLOCK_VARS (newlevel->block) = NULL_TREE;
492 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
27becfc8 493 TREE_USED (newlevel->block) = 1;
494
db72a63f 495 /* Add this level to the front of the chain (stack) of active levels. */
27becfc8 496 newlevel->chain = current_binding_level;
497 newlevel->jmpbuf_decl = NULL_TREE;
498 current_binding_level = newlevel;
499}
500
501/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
502 and point FNDECL to this BLOCK. */
503
504void
505set_current_block_context (tree fndecl)
506{
507 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
508 DECL_INITIAL (fndecl) = current_binding_level->block;
db72a63f 509 set_block_for_group (current_binding_level->block);
27becfc8 510}
511
512/* Set the jmpbuf_decl for the current binding level to DECL. */
513
514void
515set_block_jmpbuf_decl (tree decl)
516{
517 current_binding_level->jmpbuf_decl = decl;
518}
519
520/* Get the jmpbuf_decl, if any, for the current binding level. */
521
522tree
c88e6a4f 523get_block_jmpbuf_decl (void)
27becfc8 524{
525 return current_binding_level->jmpbuf_decl;
526}
527
db72a63f 528/* Exit a binding level. Set any BLOCK into the current code group. */
27becfc8 529
530void
c88e6a4f 531gnat_poplevel (void)
27becfc8 532{
533 struct gnat_binding_level *level = current_binding_level;
534 tree block = level->block;
535
536 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
43f0f599 537 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
27becfc8 538
 539 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
 540 are no variables, free the block and merge its subblocks into those of its
db72a63f 541 parent block. Otherwise, add it to the list of its parent. */
27becfc8 542 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
543 ;
ea780bd9 544 else if (!BLOCK_VARS (block))
27becfc8 545 {
546 BLOCK_SUBBLOCKS (level->chain->block)
2149d019 547 = block_chainon (BLOCK_SUBBLOCKS (block),
548 BLOCK_SUBBLOCKS (level->chain->block));
27becfc8 549 BLOCK_CHAIN (block) = free_block_chain;
550 free_block_chain = block;
551 }
552 else
553 {
554 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
555 BLOCK_SUBBLOCKS (level->chain->block) = block;
556 TREE_USED (block) = 1;
557 set_block_for_group (block);
558 }
559
560 /* Free this binding structure. */
561 current_binding_level = level->chain;
562 level->chain = free_binding_level;
563 free_binding_level = level;
564}
565
24ffee3d 566/* Exit a binding level and discard the associated BLOCK. */
567
568void
569gnat_zaplevel (void)
570{
571 struct gnat_binding_level *level = current_binding_level;
572 tree block = level->block;
573
574 BLOCK_CHAIN (block) = free_block_chain;
575 free_block_chain = block;
576
577 /* Free this binding structure. */
578 current_binding_level = level->chain;
579 level->chain = free_binding_level;
580 free_binding_level = level;
581}
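
/* A minimal usage sketch of the binding-level interface (an illustration of
   how these routines are meant to be paired, not lifted from a specific
   caller): a contour is opened with gnat_pushlevel, declarations are recorded
   in it with gnat_pushdecl while it is active, and it is closed either with
   gnat_poplevel, which keeps the BLOCK, or with gnat_zaplevel, which discards
   the BLOCK altogether. */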
27becfc8 582\f
1d9cffb8 583/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
584
585static void
586gnat_set_type_context (tree type, tree context)
587{
588 tree decl = TYPE_STUB_DECL (type);
589
590 TYPE_CONTEXT (type) = context;
591
592 while (decl && DECL_PARALLEL_TYPE (decl))
593 {
ba502e2b 594 tree parallel_type = DECL_PARALLEL_TYPE (decl);
595
596 /* Give a context to the parallel types and their stub decl, if any.
 597 Some parallel types seem to be present in multiple parallel type
598 chains, so don't mess with their context if they already have one. */
ea780bd9 599 if (!TYPE_CONTEXT (parallel_type))
ba502e2b 600 {
ea780bd9 601 if (TYPE_STUB_DECL (parallel_type))
ba502e2b 602 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
603 TYPE_CONTEXT (parallel_type) = context;
604 }
605
1d9cffb8 606 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
607 }
608}
609
810e94f8 610/* Return the innermost scope, starting at GNAT_NODE, that we are interested
 611 in for the debug info, or Empty if there is no such scope. If not NULL, set
612 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
613
28e28b29 614Entity_Id
810e94f8 615get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
616{
617 Entity_Id gnat_entity;
618
619 if (is_subprogram)
620 *is_subprogram = false;
621
28e28b29 622 if (Nkind (gnat_node) == N_Defining_Identifier
623 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
810e94f8 624 gnat_entity = Scope (gnat_node);
625 else
626 return Empty;
627
628 while (Present (gnat_entity))
629 {
630 switch (Ekind (gnat_entity))
631 {
632 case E_Function:
633 case E_Procedure:
634 if (Present (Protected_Body_Subprogram (gnat_entity)))
635 gnat_entity = Protected_Body_Subprogram (gnat_entity);
636
637 /* If the scope is a subprogram, then just rely on
638 current_function_decl, so that we don't have to defer
639 anything. This is needed because other places rely on the
640 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
641 if (is_subprogram)
642 *is_subprogram = true;
643 return gnat_entity;
644
645 case E_Record_Type:
646 case E_Record_Subtype:
647 return gnat_entity;
648
649 default:
650 /* By default, we are not interested in this particular scope: go to
651 the outer one. */
652 break;
653 }
ea780bd9 654
810e94f8 655 gnat_entity = Scope (gnat_entity);
656 }
ea780bd9 657
810e94f8 658 return Empty;
659}
660
ea780bd9 661/* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
662 of N otherwise. */
810e94f8 663
664static void
ea780bd9 665defer_or_set_type_context (tree type, tree context,
810e94f8 666 struct deferred_decl_context_node *n)
667{
668 if (n)
669 add_deferred_type_context (n, type);
670 else
671 gnat_set_type_context (type, context);
672}
673
ea780bd9 674/* Return global_context, but create it first if need be. */
810e94f8 675
676static tree
677get_global_context (void)
678{
679 if (!global_context)
f2023823 680 {
96afa092 681 global_context
682 = build_translation_unit_decl (get_identifier (main_input_filename));
f2023823 683 debug_hooks->register_main_translation_unit (global_context);
684 }
ea780bd9 685
810e94f8 686 return global_context;
687}
688
eac916f9 689/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
690 for location information and flag propagation. */
27becfc8 691
692void
693gnat_pushdecl (tree decl, Node_Id gnat_node)
694{
810e94f8 695 tree context = NULL_TREE;
696 struct deferred_decl_context_node *deferred_decl_context = NULL;
697
 698 /* If explicitly asked to make DECL global or if it's an imported nested
699 object, short-circuit the regular Scope-based context computation. */
700 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
27becfc8 701 {
810e94f8 702 /* Rely on the GNAT scope, or fallback to the current_function_decl if
703 the GNAT scope reached the global scope, if it reached a subprogram
704 or the declaration is a subprogram or a variable (for them we skip
705 intermediate context types because the subprogram body elaboration
706 machinery and the inliner both expect a subprogram context).
707
708 Falling back to current_function_decl is necessary for implicit
709 subprograms created by gigi, such as the elaboration subprograms. */
710 bool context_is_subprogram = false;
711 const Entity_Id gnat_scope
712 = get_debug_scope (gnat_node, &context_is_subprogram);
713
714 if (Present (gnat_scope)
715 && !context_is_subprogram
716 && TREE_CODE (decl) != FUNCTION_DECL
717 && TREE_CODE (decl) != VAR_DECL)
718 /* Always assume the scope has not been elaborated, thus defer the
719 context propagation to the time its elaboration will be
720 available. */
721 deferred_decl_context
722 = add_deferred_decl_context (decl, gnat_scope, force_global);
723
724 /* External declarations (when force_global > 0) may not be in a
725 local context. */
ea780bd9 726 else if (current_function_decl && force_global == 0)
810e94f8 727 context = current_function_decl;
27becfc8 728 }
729
810e94f8 730 /* If either we are forced to be in global mode or if both the GNAT scope and
ea780bd9 731 the current_function_decl did not help in determining the context, use the
810e94f8 732 global scope. */
ea780bd9 733 if (!deferred_decl_context && !context)
810e94f8 734 context = get_global_context ();
735
736 /* Functions imported in another function are not really nested.
737 For really nested functions mark them initially as needing
738 a static chain for uses of that flag before unnesting;
739 lower_nested_functions will then recompute it. */
740 if (TREE_CODE (decl) == FUNCTION_DECL
741 && !TREE_PUBLIC (decl)
ea780bd9 742 && context
810e94f8 743 && (TREE_CODE (context) == FUNCTION_DECL
ea780bd9 744 || decl_function_context (context)))
810e94f8 745 DECL_STATIC_CHAIN (decl) = 1;
746
747 if (!deferred_decl_context)
748 DECL_CONTEXT (decl) = context;
749
eac916f9 750 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
27becfc8 751
752 /* Set the location of DECL and emit a declaration for it. */
11f3f0ba 753 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
27becfc8 754 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
eac916f9 755
27becfc8 756 add_decl_expr (decl, gnat_node);
757
758 /* Put the declaration on the list. The list of declarations is in reverse
24ffee3d 759 order. The list will be reversed later. Put global declarations in the
760 globals list and local ones in the current block. But skip TYPE_DECLs
761 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
762 with the debugger and aren't needed anyway. */
763 if (!(TREE_CODE (decl) == TYPE_DECL
764 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
27becfc8 765 {
288405ec 766 /* External declarations must go to the binding level they belong to.
 767 This ensures that the corresponding imported entities are available in the
768 debugger at the proper time. */
769 if (DECL_EXTERNAL (decl)
770 && TREE_CODE (decl) == FUNCTION_DECL
771 && DECL_BUILT_IN (decl))
772 vec_safe_push (builtin_decls, decl);
f29f9ff7 773 else if (global_bindings_p ())
0c6fd2e5 774 vec_safe_push (global_decls, decl);
f29f9ff7 775 else
27becfc8 776 {
04dd9724 777 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
778 BLOCK_VARS (current_binding_level->block) = decl;
27becfc8 779 }
780 }
781
a10d3a24 782 /* For the declaration of a type, set its name either if it isn't already
515c6c6c 783 set or if the previous type name was not derived from a source name.
27becfc8 784 We'd rather have the type named with a real name and all the pointer
a10d3a24 785 types to the same object have the same node, except when the names are
786 both derived from source names. */
27becfc8 787 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
788 {
789 tree t = TREE_TYPE (decl);
790
3a49fd1c 791 /* Array and pointer types aren't tagged types in the C sense so we need
792 to generate a typedef in DWARF for them and make sure it is preserved,
793 unless the type is artificial. */
a10d3a24 794 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
3a49fd1c 795 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
796 || DECL_ARTIFICIAL (decl)))
797 ;
798 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
799 generate the typedef in DWARF. Also do that for fat pointer types
800 because, even though they are tagged types in the C sense, they are
801 still XUP types attached to the base array type at this point. */
a10d3a24 802 else if (!DECL_ARTIFICIAL (decl)
3a49fd1c 803 && (TREE_CODE (t) == ARRAY_TYPE
804 || TREE_CODE (t) == POINTER_TYPE
805 || TYPE_IS_FAT_POINTER_P (t)))
27becfc8 806 {
be3464d5 807 tree tt = build_variant_type_copy (t);
27becfc8 808 TYPE_NAME (tt) = decl;
810e94f8 809 defer_or_set_type_context (tt,
810 DECL_CONTEXT (decl),
811 deferred_decl_context);
27becfc8 812 TREE_TYPE (decl) = tt;
3a49fd1c 813 if (TYPE_NAME (t)
a10d3a24 814 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
815 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
cf07a590 816 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
817 else
818 DECL_ORIGINAL_TYPE (decl) = t;
3a49fd1c 819 /* Array types need to have a name so that they can be related to
820 their GNAT encodings. */
821 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
822 TYPE_NAME (t) = DECL_NAME (decl);
41dd28aa 823 t = NULL_TREE;
27becfc8 824 }
3a49fd1c 825 else if (TYPE_NAME (t)
a10d3a24 826 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
827 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
27becfc8 828 ;
829 else
830 t = NULL_TREE;
831
3a49fd1c 832 /* Propagate the name to all the variants, this is needed for the type
833 qualifiers machinery to work properly (see check_qualified_type).
834 Also propagate the context to them. Note that it will be propagated
835 to all parallel types too thanks to gnat_set_type_context. */
27becfc8 836 if (t)
837 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
3a49fd1c 838 /* ??? Because of the previous kludge, we can have variants of fat
839 pointer types with different names. */
840 if (!(TYPE_IS_FAT_POINTER_P (t)
841 && TYPE_NAME (t)
842 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
66f0b044 843 {
844 TYPE_NAME (t) = decl;
810e94f8 845 defer_or_set_type_context (t,
846 DECL_CONTEXT (decl),
847 deferred_decl_context);
66f0b044 848 }
27becfc8 849 }
850}
851\f
76cb9822 852/* Create a record type that contains a SIZE bytes long field of TYPE with a
853 starting bit position so that it is aligned to ALIGN bits, and leaving at
854 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
5dd00251 855 record is guaranteed to get. GNAT_NODE is used for the position of the
856 associated TYPE_DECL. */
76cb9822 857
858tree
859make_aligning_type (tree type, unsigned int align, tree size,
5dd00251 860 unsigned int base_align, int room, Node_Id gnat_node)
76cb9822 861{
862 /* We will be crafting a record type with one field at a position set to be
863 the next multiple of ALIGN past record'address + room bytes. We use a
864 record placeholder to express record'address. */
865 tree record_type = make_node (RECORD_TYPE);
866 tree record = build0 (PLACEHOLDER_EXPR, record_type);
867
868 tree record_addr_st
869 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
870
871 /* The diagram below summarizes the shape of what we manipulate:
872
873 <--------- pos ---------->
874 { +------------+-------------+-----------------+
875 record =>{ |############| ... | field (type) |
876 { +------------+-------------+-----------------+
877 |<-- room -->|<- voffset ->|<---- size ----->|
878 o o
879 | |
880 record_addr vblock_addr
881
882 Every length is in sizetype bytes there, except "pos" which has to be
883 set as a bit position in the GCC tree for the record. */
884 tree room_st = size_int (room);
885 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
886 tree voffset_st, pos, field;
887
48dcce25 888 tree name = TYPE_IDENTIFIER (type);
76cb9822 889
76cb9822 890 name = concat_name (name, "ALIGN");
891 TYPE_NAME (record_type) = name;
892
893 /* Compute VOFFSET and then POS. The next byte position multiple of some
894 alignment after some address is obtained by "and"ing the alignment minus
895 1 with the two's complement of the address. */
896 voffset_st = size_binop (BIT_AND_EXPR,
897 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
898 size_int ((align / BITS_PER_UNIT) - 1));
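
  /* Illustrative arithmetic with hypothetical values: if ALIGN is 64 bits,
     the mask is 64 / BITS_PER_UNIT - 1 = 7 on a byte-addressable target; for
     a vblock_addr equal to 5 modulo 8, (-vblock_addr) & 7 = 3, so the field
     is placed 3 bytes further on, at the next multiple of 8 bytes. */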
899
900 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
901 pos = size_binop (MULT_EXPR,
902 convert (bitsizetype,
903 size_binop (PLUS_EXPR, room_st, voffset_st)),
904 bitsize_unit_node);
905
906 /* Craft the GCC record representation. We exceptionally do everything
907 manually here because 1) our generic circuitry is not quite ready to
908 handle the complex position/size expressions we are setting up, 2) we
909 have a strong simplifying factor at hand: we know the maximum possible
910 value of voffset, and 3) we have to set/reset at least the sizes in
911 accordance with this maximum value anyway, as we need them to convey
912 what should be "alloc"ated for this type.
913
914 Use -1 as the 'addressable' indication for the field to prevent the
915 creation of a bitfield. We don't need one, it would have damaging
916 consequences on the alignment computation, and create_field_decl would
917 make one without this special argument, for instance because of the
918 complex position expression. */
919 field = create_field_decl (get_identifier ("F"), type, record_type, size,
920 pos, 1, -1);
921 TYPE_FIELDS (record_type) = field;
922
5d4b30ea 923 SET_TYPE_ALIGN (record_type, base_align);
76cb9822 924 TYPE_USER_ALIGN (record_type) = 1;
925
926 TYPE_SIZE (record_type)
927 = size_binop (PLUS_EXPR,
928 size_binop (MULT_EXPR, convert (bitsizetype, size),
929 bitsize_unit_node),
930 bitsize_int (align + room * BITS_PER_UNIT));
931 TYPE_SIZE_UNIT (record_type)
932 = size_binop (PLUS_EXPR, size,
933 size_int (room + align / BITS_PER_UNIT));
934
935 SET_TYPE_MODE (record_type, BLKmode);
936 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
937
938 /* Declare it now since it will never be declared otherwise. This is
939 necessary to ensure that its subtrees are properly marked. */
081f18cf 940 create_type_decl (name, record_type, true, false, gnat_node);
76cb9822 941
942 return record_type;
943}
944
945/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
946 as the field type of a packed record if IN_RECORD is true, or as the
947 component type of a packed array if IN_RECORD is false. See if we can
7214e56d 948 rewrite it either as a type that has non-BLKmode, which we can pack
949 tighter in the packed record case, or as a smaller type with at most
950 MAX_ALIGN alignment if the value is non-zero. If so, return the new
951 type; if not, return the original type. */
76cb9822 952
953tree
7214e56d 954make_packable_type (tree type, bool in_record, unsigned int max_align)
76cb9822 955{
e913b5cd 956 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
76cb9822 957 unsigned HOST_WIDE_INT new_size;
7214e56d 958 unsigned int align = TYPE_ALIGN (type);
959 unsigned int new_align;
76cb9822 960
961 /* No point in doing anything if the size is zero. */
962 if (size == 0)
963 return type;
964
7214e56d 965 tree new_type = make_node (TREE_CODE (type));
76cb9822 966
967 /* Copy the name and flags from the old type to that of the new.
968 Note that we rely on the pointer equality created here for
969 TYPE_NAME to look through conversions in various places. */
970 TYPE_NAME (new_type) = TYPE_NAME (type);
971 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
972 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
292237f3 973 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
76cb9822 974 if (TREE_CODE (type) == RECORD_TYPE)
975 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
976
977 /* If we are in a record and have a small size, set the alignment to
978 try for an integral mode. Otherwise set it to try for a smaller
979 type with BLKmode. */
980 if (in_record && size <= MAX_FIXED_MODE_SIZE)
981 {
7214e56d 982 new_size = ceil_pow2 (size);
983 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
984 SET_TYPE_ALIGN (new_type, new_align);
76cb9822 985 }
986 else
987 {
76cb9822 988 /* Do not try to shrink the size if the RM size is not constant. */
989 if (TYPE_CONTAINS_TEMPLATE_P (type)
e913b5cd 990 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
76cb9822 991 return type;
992
993 /* Round the RM size up to a unit boundary to get the minimal size
7214e56d 994 for a BLKmode record. Give up if it's already the size and we
995 don't need to lower the alignment. */
e913b5cd 996 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
76cb9822 997 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
7214e56d 998 if (new_size == size && (max_align == 0 || align <= max_align))
76cb9822 999 return type;
1000
7214e56d 1001 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1002 if (max_align > 0 && new_align > max_align)
1003 new_align = max_align;
1004 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
76cb9822 1005 }
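
  /* Worked examples with illustrative values only, assuming BITS_PER_UNIT is
     8: in a record, a 5-bit size gives new_size = ceil_pow2 (5) = 8 and
     new_align = 8, aiming for an integral mode; otherwise an RM size of 17
     bits is rounded up to new_size = 24 and new_align = 24 & -24 = 8, before
     any clamping by MAX_ALIGN, aiming for a smaller BLKmode type. */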
1006
1007 TYPE_USER_ALIGN (new_type) = 1;
1008
1009 /* Now copy the fields, keeping the position and size as we don't want
1010 to change the layout by propagating the packedness downwards. */
7214e56d 1011 tree new_field_list = NULL_TREE;
1012 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
76cb9822 1013 {
7214e56d 1014 tree new_field_type = TREE_TYPE (field);
76cb9822 1015 tree new_field, new_size;
1016
1017 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1018 && !TYPE_FAT_POINTER_P (new_field_type)
e913b5cd 1019 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
7214e56d 1020 new_field_type = make_packable_type (new_field_type, true, max_align);
76cb9822 1021
1022 /* However, for the last field in a not already packed record type
1023 that is of an aggregate type, we need to use the RM size in the
1024 packable version of the record type, see finish_record_type. */
7214e56d 1025 if (!DECL_CHAIN (field)
76cb9822 1026 && !TYPE_PACKED (type)
1027 && RECORD_OR_UNION_TYPE_P (new_field_type)
1028 && !TYPE_FAT_POINTER_P (new_field_type)
1029 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1030 && TYPE_ADA_SIZE (new_field_type))
1031 new_size = TYPE_ADA_SIZE (new_field_type);
1032 else
7214e56d 1033 new_size = DECL_SIZE (field);
76cb9822 1034
1035 new_field
7214e56d 1036 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1037 new_size, bit_position (field),
76cb9822 1038 TYPE_PACKED (type),
7214e56d 1039 !DECL_NONADDRESSABLE_P (field));
76cb9822 1040
7214e56d 1041 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1042 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
76cb9822 1043 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
7214e56d 1044 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
76cb9822 1045
7214e56d 1046 DECL_CHAIN (new_field) = new_field_list;
1047 new_field_list = new_field;
76cb9822 1048 }
1049
7214e56d 1050 finish_record_type (new_type, nreverse (new_field_list), 2, false);
76cb9822 1051 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
62092ba6 1052 if (TYPE_STUB_DECL (type))
1053 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1054 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
76cb9822 1055
1056 /* If this is a padding record, we never want to make the size smaller
1057 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1058 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1059 {
1060 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1061 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1062 new_size = size;
1063 }
1064 else
1065 {
1066 TYPE_SIZE (new_type) = bitsize_int (new_size);
7214e56d 1067 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
76cb9822 1068 }
1069
1070 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1071 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1072
1073 compute_record_mode (new_type);
1074
1075 /* Try harder to get a packable type if necessary, for example
1076 in case the record itself contains a BLKmode field. */
1077 if (in_record && TYPE_MODE (new_type) == BLKmode)
1078 SET_TYPE_MODE (new_type,
1079 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1080
7214e56d 1081 /* If neither mode nor size nor alignment shrunk, return the old type. */
1082 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
76cb9822 1083 return type;
1084
1085 return new_type;
1086}
1087
a84cc613 1088/* Return true if TYPE has an unsigned representation. This needs to be used
1089 when the representation of types whose precision is not equal to their size
1090 is manipulated based on the RM size. */
1091
1092static inline bool
1093type_unsigned_for_rm (tree type)
1094{
1095 /* This is the common case. */
1096 if (TYPE_UNSIGNED (type))
1097 return true;
1098
1099 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1100 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1101 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1102 return true;
1103
1104 return false;
1105}
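
/* For instance, type_unsigned_for_rm returns true for a subtype whose base
   type is signed but whose lower bound is a non-negative constant, such as a
   range 0 .. 10, even though TYPE_UNSIGNED is false on its GCC type. */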
1106
76cb9822 1107/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1108 If TYPE is the best type, return it. Otherwise, make a new type. We
1109 only support new integral and pointer types. FOR_BIASED is true if
1110 we are making a biased type. */
1111
1112tree
1113make_type_from_size (tree type, tree size_tree, bool for_biased)
1114{
1115 unsigned HOST_WIDE_INT size;
1116 bool biased_p;
1117 tree new_type;
1118
1119 /* If size indicates an error, just return TYPE to avoid propagating
1120 the error. Likewise if it's too large to represent. */
e913b5cd 1121 if (!size_tree || !tree_fits_uhwi_p (size_tree))
76cb9822 1122 return type;
1123
e913b5cd 1124 size = tree_to_uhwi (size_tree);
76cb9822 1125
1126 switch (TREE_CODE (type))
1127 {
1128 case INTEGER_TYPE:
1129 case ENUMERAL_TYPE:
1130 case BOOLEAN_TYPE:
1131 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1132 && TYPE_BIASED_REPRESENTATION_P (type));
1133
1134 /* Integer types with precision 0 are forbidden. */
1135 if (size == 0)
1136 size = 1;
1137
1138 /* Only do something if the type isn't a packed array type and doesn't
1139 already have the proper size and the size isn't too large. */
1140 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1141 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1142 || size > LONG_LONG_TYPE_SIZE)
1143 break;
1144
1145 biased_p |= for_biased;
6827ab42 1146
1147 /* The type should be an unsigned type if the original type is unsigned
1148 or if the lower bound is constant and non-negative or if the type is
1149 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
a84cc613 1150 if (type_unsigned_for_rm (type) || biased_p)
76cb9822 1151 new_type = make_unsigned_type (size);
1152 else
1153 new_type = make_signed_type (size);
1154 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
6fa4bf38 1155 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1156 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
76cb9822 1157 /* Copy the name to show that it's essentially the same type and
1158 not a subrange type. */
1159 TYPE_NAME (new_type) = TYPE_NAME (type);
1160 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1161 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1162 return new_type;
1163
1164 case RECORD_TYPE:
1165 /* Do something if this is a fat pointer, in which case we
1166 may need to return the thin pointer. */
1167 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1168 {
44504d18 1169 scalar_int_mode p_mode;
1170 if (!int_mode_for_size (size, 0).exists (&p_mode)
1171 || !targetm.valid_pointer_mode (p_mode))
76cb9822 1172 p_mode = ptr_mode;
1173 return
1174 build_pointer_type_for_mode
1175 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1176 p_mode, 0);
1177 }
1178 break;
1179
1180 case POINTER_TYPE:
1181 /* Only do something if this is a thin pointer, in which case we
1182 may need to return the fat pointer. */
1183 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1184 return
1185 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1186 break;
1187
1188 default:
1189 break;
1190 }
1191
1192 return type;
1193}
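
/* A usage sketch with hypothetical operands, not taken from an actual caller:
   applied to a 32-bit signed INTEGER_TYPE with a size_tree of bitsize_int (24)
   and FOR_BIASED false, the function above returns make_signed_type (24) with
   the original bounds installed as RM bounds and an RM size of 24; applied to
   a type that already has precision 24, it returns the type unchanged. */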
1194
1195/* See if the data pointed to by the hash table slot is marked. */
1196
99378011 1197int
1198pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
76cb9822 1199{
99378011 1200 return ggc_marked_p (t->type);
76cb9822 1201}
1202
d1023d12 1203/* Return true iff the padded types are equivalent. */
76cb9822 1204
d1023d12 1205bool
1206pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
76cb9822 1207{
76cb9822 1208 tree type1, type2;
1209
1210 if (t1->hash != t2->hash)
1211 return 0;
1212
1213 type1 = t1->type;
1214 type2 = t2->type;
1215
292237f3 1216 /* We consider that the padded types are equivalent if they pad the same type
1217 and have the same size, alignment, RM size and storage order. Taking the
1218 mode into account is redundant since it is determined by the others. */
76cb9822 1219 return
1220 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1221 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1222 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
292237f3 1223 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1224 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
76cb9822 1225}
1226
c6ec89a6 1227/* Look up the padded TYPE in the hash table and return its canonical version
1228 if it exists; otherwise, insert it into the hash table. */
1229
1230static tree
1231lookup_and_insert_pad_type (tree type)
1232{
1233 hashval_t hashcode;
1234 struct pad_type_hash in, *h;
c6ec89a6 1235
1236 hashcode
1237 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1238 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1239 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1240 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1241
1242 in.hash = hashcode;
1243 in.type = type;
d1023d12 1244 h = pad_type_hash_table->find_with_hash (&in, hashcode);
c6ec89a6 1245 if (h)
1246 return h->type;
1247
1248 h = ggc_alloc<pad_type_hash> ();
1249 h->hash = hashcode;
1250 h->type = type;
d1023d12 1251 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
c6ec89a6 1252 return NULL_TREE;
1253}
1254
76cb9822 1255/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
c6ec89a6 1256 if needed. We have already verified that SIZE and ALIGN are large enough.
76cb9822 1257 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1258 IS_COMPONENT_TYPE is true if this is being done for the component type of
1259 an array. IS_USER_TYPE is true if the original type needs to be completed.
1260 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
baaf92dc 1261 the RM size of the resulting type is to be set to SIZE too; in this case,
1262 the padded type is canonicalized before being returned. */
76cb9822 1263
1264tree
1265maybe_pad_type (tree type, tree size, unsigned int align,
1266 Entity_Id gnat_entity, bool is_component_type,
1267 bool is_user_type, bool definition, bool set_rm_size)
1268{
1269 tree orig_size = TYPE_SIZE (type);
62092ba6 1270 unsigned int orig_align = TYPE_ALIGN (type);
76cb9822 1271 tree record, field;
1272
1273 /* If TYPE is a padded type, see if it agrees with any size and alignment
1274 we were given. If so, return the original type. Otherwise, strip
1275 off the padding, since we will either be returning the inner type
1276 or repadding it. If no size or alignment is specified, use that of
1277 the original padded type. */
1278 if (TYPE_IS_PADDING_P (type))
1279 {
1280 if ((!size
62092ba6 1281 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1282 && (align == 0 || align == orig_align))
76cb9822 1283 return type;
1284
1285 if (!size)
62092ba6 1286 size = orig_size;
76cb9822 1287 if (align == 0)
62092ba6 1288 align = orig_align;
76cb9822 1289
1290 type = TREE_TYPE (TYPE_FIELDS (type));
1291 orig_size = TYPE_SIZE (type);
62092ba6 1292 orig_align = TYPE_ALIGN (type);
76cb9822 1293 }
1294
1295 /* If the size is either not being changed or is being made smaller (which
1296 is not done here and is only valid for bitfields anyway), show the size
1297 isn't changing. Likewise, clear the alignment if it isn't being
1298 changed. Then return if we aren't doing anything. */
1299 if (size
1300 && (operand_equal_p (size, orig_size, 0)
1301 || (TREE_CODE (orig_size) == INTEGER_CST
1302 && tree_int_cst_lt (size, orig_size))))
1303 size = NULL_TREE;
1304
62092ba6 1305 if (align == orig_align)
76cb9822 1306 align = 0;
1307
1308 if (align == 0 && !size)
1309 return type;
1310
1311 /* If requested, complete the original type and give it a name. */
1312 if (is_user_type)
1313 create_type_decl (get_entity_name (gnat_entity), type,
081f18cf 1314 !Comes_From_Source (gnat_entity),
76cb9822 1315 !(TYPE_NAME (type)
1316 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1317 && DECL_IGNORED_P (TYPE_NAME (type))),
1318 gnat_entity);
1319
1320 /* We used to modify the record in place in some cases, but that could
1321 generate incorrect debugging information. So make a new record
1322 type and name. */
1323 record = make_node (RECORD_TYPE);
1324 TYPE_PADDING_P (record) = 1;
1325
ea780bd9 1326 /* ??? Padding types around packed array implementation types will be
91f09771 1327 considered as root types in the array descriptor language hook (see
1328 gnat_get_array_descr_info). Give them the original packed array type
1329 name so that the one coming from sources appears in the debugging
1330 information. */
ea780bd9 1331 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1332 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1333 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1334 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
91f09771 1335 else if (Present (gnat_entity))
76cb9822 1336 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1337
5d4b30ea 1338 SET_TYPE_ALIGN (record, align ? align : orig_align);
76cb9822 1339 TYPE_SIZE (record) = size ? size : orig_size;
1340 TYPE_SIZE_UNIT (record)
1341 = convert (sizetype,
1342 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1343 bitsize_unit_node));
1344
1345 /* If we are changing the alignment and the input type is a record with
1346 BLKmode and a small constant size, try to make a form that has an
1347 integral mode. This might allow the padding record to also have an
1348 integral mode, which will be much more efficient. There is no point
1349 in doing so if a size is specified unless it is also a small constant
1350 size and it is incorrect to do so if we cannot guarantee that the mode
1351 will be naturally aligned since the field must always be addressable.
1352
1353 ??? This might not always be a win when done for a stand-alone object:
1354 since the nominal and the effective type of the object will now have
1355 different modes, a VIEW_CONVERT_EXPR will be required for converting
1356 between them and it might be hard to overcome afterwards, including
1357 at the RTL level when the stand-alone object is accessed as a whole. */
1358 if (align != 0
1359 && RECORD_OR_UNION_TYPE_P (type)
1360 && TYPE_MODE (type) == BLKmode
1361 && !TYPE_BY_REFERENCE_P (type)
1362 && TREE_CODE (orig_size) == INTEGER_CST
1363 && !TREE_OVERFLOW (orig_size)
1364 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1365 && (!size
1366 || (TREE_CODE (size) == INTEGER_CST
1367 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1368 {
1369 tree packable_type = make_packable_type (type, true);
1370 if (TYPE_MODE (packable_type) != BLKmode
1371 && align >= TYPE_ALIGN (packable_type))
1372 type = packable_type;
1373 }
1374
1375 /* Now create the field with the original size. */
783c4ee3 1376 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1377 bitsize_zero_node, 0, 1);
76cb9822 1378 DECL_INTERNAL_P (field) = 1;
1379
baaf92dc 1380 /* We will output additional debug info manually below. */
76cb9822 1381 finish_record_type (record, field, 1, false);
1382
baaf92dc 1383 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1384 SET_TYPE_DEBUG_TYPE (record, type);
1385
76cb9822 1386 /* Set the RM size if requested. */
1387 if (set_rm_size)
1388 {
c6ec89a6 1389 tree canonical_pad_type;
1390
76cb9822 1391 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1392
1393 /* If the padded type is complete and has constant size, we canonicalize
1394 it by means of the hash table. This is consistent with the language
1395 semantics and ensures that gigi and the middle-end have a common view
1396 of these padded types. */
c6ec89a6 1397 if (TREE_CONSTANT (TYPE_SIZE (record))
1398 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
76cb9822 1399 {
c6ec89a6 1400 record = canonical_pad_type;
1401 goto built;
76cb9822 1402 }
1403 }
1404
 1405 /* If debugging information is being written for the input type, write a
 1406 record that shows what we are a subtype of and also make a
db3c183a 1407 variable that indicates our size, if still variable. */
1408 if (TREE_CODE (orig_size) != INTEGER_CST
76cb9822 1409 && TYPE_NAME (record)
1410 && TYPE_NAME (type)
1411 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1412 && DECL_IGNORED_P (TYPE_NAME (type))))
1413 {
48dcce25 1414 tree name = TYPE_IDENTIFIER (record);
4905002b 1415 tree size_unit = TYPE_SIZE_UNIT (record);
1416
1417 /* A variable that holds the size is required even with no encoding since
1418 it will be referenced by debugging information attributes. At global
1419 level, we need a single variable across all translation units. */
1420 if (size
1421 && TREE_CODE (size) != INTEGER_CST
1422 && (definition || global_bindings_p ()))
1423 {
db3c183a 1424 /* Whether or not gnat_entity comes from source, this XVZ variable is
1425 a compilation artifact. */
4905002b 1426 size_unit
1427 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1428 size_unit, true, global_bindings_p (),
1429 !definition && global_bindings_p (), false,
c0df8418 1430 false, true, true, NULL, gnat_entity);
4905002b 1431 TYPE_SIZE_UNIT (record) = size_unit;
1432 }
1433
db3c183a 1434 /* There is no need to show what we are a subtype of when outputting as
1435 few encodings as possible: regular debugging information makes this
1436 redundant. */
1437 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1438 {
1439 tree marker = make_node (RECORD_TYPE);
1440 tree orig_name = TYPE_IDENTIFIER (type);
1441
1442 TYPE_NAME (marker) = concat_name (name, "XVS");
1443 finish_record_type (marker,
1444 create_field_decl (orig_name,
1445 build_reference_type (type),
1446 marker, NULL_TREE, NULL_TREE,
1447 0, 0),
1448 0, true);
1449 TYPE_SIZE_UNIT (marker) = size_unit;
1450
1451 add_parallel_type (record, marker);
1452 }
76cb9822 1453 }
1454
76cb9822 1455built:
e43e03dc 1456 /* If a simple size was explicitly given, maybe issue a warning. */
76cb9822 1457 if (!size
1458 || TREE_CODE (size) == COND_EXPR
1459 || TREE_CODE (size) == MAX_EXPR
e43e03dc 1460 || No (gnat_entity))
76cb9822 1461 return record;
1462
e43e03dc 1463 /* But don't do it if we are just annotating types and the type is tagged or
1464 concurrent, since these types aren't fully laid out in this mode. */
1465 if (type_annotate_only)
1466 {
1467 Entity_Id gnat_type
1468 = is_component_type
1469 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1470
1471 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1472 return record;
1473 }
1474
1475 /* Take the original size as the maximum size of the input if there was an
1476 unconstrained record involved and round it up to the specified alignment,
1477 if one was specified, but only for aggregate types. */
76cb9822 1478 if (CONTAINS_PLACEHOLDER_P (orig_size))
1479 orig_size = max_size (orig_size, true);
1480
108d967d 1481 if (align && AGGREGATE_TYPE_P (type))
76cb9822 1482 orig_size = round_up (orig_size, align);
1483
1484 if (!operand_equal_p (size, orig_size, 0)
1485 && !(TREE_CODE (size) == INTEGER_CST
1486 && TREE_CODE (orig_size) == INTEGER_CST
1487 && (TREE_OVERFLOW (size)
1488 || TREE_OVERFLOW (orig_size)
1489 || tree_int_cst_lt (size, orig_size))))
1490 {
1491 Node_Id gnat_error_node = Empty;
1492
7c9243cb 1493 /* For a packed array, post the message on the original array type. */
1494 if (Is_Packed_Array_Impl_Type (gnat_entity))
76cb9822 1495 gnat_entity = Original_Array_Type (gnat_entity);
1496
1497 if ((Ekind (gnat_entity) == E_Component
1498 || Ekind (gnat_entity) == E_Discriminant)
1499 && Present (Component_Clause (gnat_entity)))
1500 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1501 else if (Present (Size_Clause (gnat_entity)))
1502 gnat_error_node = Expression (Size_Clause (gnat_entity));
1503
1504 /* Generate message only for entities that come from source, since
1505 if we have an entity created by expansion, the message will be
1506 generated for some other corresponding source entity. */
1507 if (Comes_From_Source (gnat_entity))
1508 {
1509 if (Present (gnat_error_node))
1510 post_error_ne_tree ("{^ }bits of & unused?",
1511 gnat_error_node, gnat_entity,
1512 size_diffop (size, orig_size));
1513 else if (is_component_type)
1514 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1515 gnat_entity, gnat_entity,
1516 size_diffop (size, orig_size));
1517 }
1518 }
1519
1520 return record;
1521}
292237f3 1522
1523/* Return a copy of the padded TYPE but with reverse storage order. */
1524
1525tree
1526set_reverse_storage_order_on_pad_type (tree type)
1527{
1528 tree field, canonical_pad_type;
1529
d28596a5 1530 if (flag_checking)
1531 {
1532 /* If the inner type is not scalar then the function does nothing. */
1533 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1534 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1535 && !VECTOR_TYPE_P (inner_type));
1536 }
292237f3 1537
1538 /* This is required for the canonicalization. */
1539 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1540
1541 field = copy_node (TYPE_FIELDS (type));
1542 type = copy_type (type);
1543 DECL_CONTEXT (field) = type;
1544 TYPE_FIELDS (type) = field;
1545 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1546 canonical_pad_type = lookup_and_insert_pad_type (type);
1547 return canonical_pad_type ? canonical_pad_type : type;
1548}
76cb9822 1549\f
1550/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1551 If this is a multi-dimensional array type, do this recursively.
1552
1553 OP may be
1554 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1555 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1556 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1557
1558void
1559relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1560{
1561 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1562 of a one-dimensional array, since the padding has the same alias set
1563 as the field type, but if it's a multi-dimensional array, we need to
1564 see the inner types. */
1565 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1566 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1567 || TYPE_PADDING_P (gnu_old_type)))
1568 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1569
1570 /* Unconstrained array types are deemed incomplete and would thus be given
1571 alias set 0. Retrieve the underlying array type. */
1572 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1573 gnu_old_type
1574 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1575 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1576 gnu_new_type
1577 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1578
1579 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1580 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1581 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1582 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1583
1584 switch (op)
1585 {
1586 case ALIAS_SET_COPY:
1587 /* The alias set shouldn't be copied between array types with different
1588 aliasing settings because this can break the aliasing relationship
1589 between the array type and its element type. */
2de3a813 1590 if (flag_checking || flag_strict_aliasing)
76cb9822 1591 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1592 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1593 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1594 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1595
1596 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1597 break;
1598
1599 case ALIAS_SET_SUBSET:
1600 case ALIAS_SET_SUPERSET:
1601 {
1602 alias_set_type old_set = get_alias_set (gnu_old_type);
1603 alias_set_type new_set = get_alias_set (gnu_new_type);
1604
1605 /* Do nothing if the alias sets conflict. This ensures that we
1606 never call record_alias_subset several times for the same pair
1607 or at all for alias set 0. */
1608 if (!alias_sets_conflict_p (old_set, new_set))
1609 {
1610 if (op == ALIAS_SET_SUBSET)
1611 record_alias_subset (old_set, new_set);
1612 else
1613 record_alias_subset (new_set, old_set);
1614 }
1615 }
1616 break;
1617
1618 default:
1619 gcc_unreachable ();
1620 }
1621
1622 record_component_aliases (gnu_new_type);
1623}
1624\f
55052087 1625/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
4905002b 1626 ARTIFICIAL_P is true if the type was generated by the compiler. */
27becfc8 1627
1628void
55052087 1629record_builtin_type (const char *name, tree type, bool artificial_p)
27becfc8 1630{
f3c9930a 1631 tree type_decl = build_decl (input_location,
1632 TYPE_DECL, get_identifier (name), type);
55052087 1633 DECL_ARTIFICIAL (type_decl) = artificial_p;
5bf971ee 1634 TYPE_ARTIFICIAL (type) = artificial_p;
515c6c6c 1635 gnat_pushdecl (type_decl, Empty);
27becfc8 1636
515c6c6c 1637 if (debug_hooks->type_decl)
1638 debug_hooks->type_decl (type_decl, false);
27becfc8 1639}
1640\f
96536a9d 1641/* Finish constructing the character type CHAR_TYPE.
1642
1643 In Ada character types are enumeration types and, as a consequence, are
1644 represented in the front-end by integral types holding the positions of
1645 the enumeration values as defined by the language, which means that the
1646 integral types are unsigned.
1647
1648 Unfortunately the signedness of 'char' in C is implementation-defined
1649 and GCC even has the option -fsigned-char to toggle it at compile time.
1650 Since GNAT's philosophy is to be compatible with C by default, to wit
1651 Interfaces.C.char is defined as a mere copy of Character, we may need
1652 to declare character types as signed types in GENERIC and generate the
1653 necessary adjustments to make them behave as unsigned types.
1654
1655 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1656 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1657 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1658 types. The idea is to ensure that the bit pattern contained in the
1659 Esize'd objects is not changed, even though the numerical value will
5a33bf5a 1660 be interpreted differently depending on the signedness. */
96536a9d 1661
1662void
1663finish_character_type (tree char_type)
1664{
1665 if (TYPE_UNSIGNED (char_type))
1666 return;
1667
ffc23920 1668 /* Make a copy of a generic unsigned version since we'll modify it. */
1669 tree unsigned_char_type
1670 = (char_type == char_type_node
1671 ? unsigned_char_type_node
1672 : copy_type (gnat_unsigned_type_for (char_type)));
96536a9d 1673
5a33bf5a 1674 /* Create an unsigned version of the type and set it as debug type. */
96536a9d 1675 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1676 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1677 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
96536a9d 1678 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
5a33bf5a 1679
1680 /* If this is a subtype, make the debug type a subtype of the debug type
77532bfb 1681 of the base type and convert literal RM bounds to unsigned. */
5a33bf5a 1682 if (TREE_TYPE (char_type))
1683 {
1684 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
77532bfb 1685 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1686 tree max_value = TYPE_RM_MAX_VALUE (char_type);
5a33bf5a 1687
1688 if (TREE_CODE (min_value) == INTEGER_CST)
1689 min_value = fold_convert (base_unsigned_char_type, min_value);
1690 if (TREE_CODE (max_value) == INTEGER_CST)
1691 max_value = fold_convert (base_unsigned_char_type, max_value);
1692
1693 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1694 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1695 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1696 }
1697
77532bfb 1698 /* Adjust the RM bounds of the original type to unsigned; that's especially
5a33bf5a 1699 important for types since they are implicit in this case. */
96536a9d 1700 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1701 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1702}
1703
41dd28aa 1704/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1705 finish constructing the record type as a fat pointer type. */
1706
1707void
1708finish_fat_pointer_type (tree record_type, tree field_list)
1709{
1710 /* Make sure we can put it into a register. */
0ade479e 1711 if (STRICT_ALIGNMENT)
5d4b30ea 1712 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
41dd28aa 1713
1714 /* Show what it really is. */
1715 TYPE_FAT_POINTER_P (record_type) = 1;
1716
1717 /* Do not emit debug info for it since the types of its fields may still be
1718 incomplete at this point. */
1719 finish_record_type (record_type, field_list, 0, false);
1720
1721 /* Force type_contains_placeholder_p to return true on it. Although the
1722 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1723 type but the representation of the unconstrained array. */
1724 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1725}
1726
f9001da7 1727/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
27becfc8 1728 finish constructing the record or union type. If REP_LEVEL is zero, this
1729 record has no representation clause and so will be entirely laid out here.
1730 If REP_LEVEL is one, this record has a representation clause and has been
1731 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1732 this record is derived from a parent record and thus inherits its layout;
f9001da7 1733 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
baaf92dc 1734 additional debug info needs to be output for this type. */
27becfc8 1735
1736void
f9001da7 1737finish_record_type (tree record_type, tree field_list, int rep_level,
1738 bool debug_info_p)
27becfc8 1739{
1740 enum tree_code code = TREE_CODE (record_type);
48dcce25 1741 tree name = TYPE_IDENTIFIER (record_type);
27becfc8 1742 tree ada_size = bitsize_zero_node;
1743 tree size = bitsize_zero_node;
1744 bool had_size = TYPE_SIZE (record_type) != 0;
1745 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1746 bool had_align = TYPE_ALIGN (record_type) != 0;
1747 tree field;
1748
f9001da7 1749 TYPE_FIELDS (record_type) = field_list;
27becfc8 1750
515c6c6c 1751 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1752 generate debug info and have a parallel type. */
515c6c6c 1753 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
27becfc8 1754
1755 /* Globally initialize the record first. If this is a rep'ed record,
1756 that just means some initializations; otherwise, layout the record. */
1757 if (rep_level > 0)
1758 {
5d4b30ea 1759 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1760 TYPE_ALIGN (record_type)));
27becfc8 1761
1762 if (!had_size_unit)
1763 TYPE_SIZE_UNIT (record_type) = size_zero_node;
02433bf7 1764
27becfc8 1765 if (!had_size)
1766 TYPE_SIZE (record_type) = bitsize_zero_node;
1767
1768 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1769 out just like a UNION_TYPE, since the size will be fixed. */
1770 else if (code == QUAL_UNION_TYPE)
1771 code = UNION_TYPE;
1772 }
1773 else
1774 {
1775 /* Ensure there isn't a size already set. There can be in an error
1776 case where there is a rep clause but all fields have errors and
1777 no longer have a position. */
1778 TYPE_SIZE (record_type) = 0;
81f9f420 1779
1780 /* Ensure we use the traditional GCC layout for bitfields when we need
1781 to pack the record type or have a representation clause. The other
1782 possible layout (Microsoft C compiler), if available, would prevent
1783 efficient packing in almost all cases. */
1784#ifdef TARGET_MS_BITFIELD_LAYOUT
1785 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1786 decl_attributes (&record_type,
1787 tree_cons (get_identifier ("gcc_struct"),
1788 NULL_TREE, NULL_TREE),
1789 ATTR_FLAG_TYPE_IN_PLACE);
1790#endif
1791
27becfc8 1792 layout_type (record_type);
1793 }
1794
1795 /* At this point, the position and size of each field is known. It was
1796 either set before entry by a rep clause, or by laying out the type above.
1797
1798 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1799 to compute the Ada size; the GCC size and alignment (for rep'ed records
1800 that are not padding types); and the mode (for rep'ed records). We also
1801 clear the DECL_BIT_FIELD indication for the cases we know have not been
1802 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1803
1804 if (code == QUAL_UNION_TYPE)
f9001da7 1805 field_list = nreverse (field_list);
27becfc8 1806
1767a056 1807 for (field = field_list; field; field = DECL_CHAIN (field))
27becfc8 1808 {
1809 tree type = TREE_TYPE (field);
1810 tree pos = bit_position (field);
1811 tree this_size = DECL_SIZE (field);
1812 tree this_ada_size;
1813
4a17ee95 1814 if (RECORD_OR_UNION_TYPE_P (type)
a98f6bec 1815 && !TYPE_FAT_POINTER_P (type)
27becfc8 1816 && !TYPE_CONTAINS_TEMPLATE_P (type)
1817 && TYPE_ADA_SIZE (type))
1818 this_ada_size = TYPE_ADA_SIZE (type);
1819 else
1820 this_ada_size = this_size;
1821
1822 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1823 if (DECL_BIT_FIELD (field)
1824 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1825 {
1826 unsigned int align = TYPE_ALIGN (type);
1827
1828 /* In the general case, type alignment is required. */
1829 if (value_factor_p (pos, align))
1830 {
1831 /* The enclosing record type must be sufficiently aligned.
1832 Otherwise, if no alignment was specified for it and it
1833 has been laid out already, bump its alignment to the
4ca76485 1834 desired one if this is compatible with its size and
1835 maximum alignment, if any. */
27becfc8 1836 if (TYPE_ALIGN (record_type) >= align)
1837 {
5d4b30ea 1838 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
27becfc8 1839 DECL_BIT_FIELD (field) = 0;
1840 }
1841 else if (!had_align
1842 && rep_level == 0
4ca76485 1843 && value_factor_p (TYPE_SIZE (record_type), align)
1844 && (!TYPE_MAX_ALIGN (record_type)
1845 || TYPE_MAX_ALIGN (record_type) >= align))
27becfc8 1846 {
5d4b30ea 1847 SET_TYPE_ALIGN (record_type, align);
1848 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
27becfc8 1849 DECL_BIT_FIELD (field) = 0;
1850 }
1851 }
1852
1853 /* In the non-strict alignment case, only byte alignment is required. */
1854 if (!STRICT_ALIGNMENT
1855 && DECL_BIT_FIELD (field)
1856 && value_factor_p (pos, BITS_PER_UNIT))
1857 DECL_BIT_FIELD (field) = 0;
1858 }
1859
211df513 1860 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1861 field is technically not addressable. Except that it can actually
1862 be addressed if it is BLKmode and happens to be properly aligned. */
1863 if (DECL_BIT_FIELD (field)
1864 && !(DECL_MODE (field) == BLKmode
1865 && value_factor_p (pos, BITS_PER_UNIT)))
1866 DECL_NONADDRESSABLE_P (field) = 1;
27becfc8 1867
1868 /* A type must be as aligned as its most aligned field that is not
1869 a bit-field. But this is already enforced by layout_type. */
1870 if (rep_level > 0 && !DECL_BIT_FIELD (field))
5d4b30ea 1871 SET_TYPE_ALIGN (record_type,
1872 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
27becfc8 1873
1874 switch (code)
1875 {
1876 case UNION_TYPE:
1877 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1878 size = size_binop (MAX_EXPR, size, this_size);
1879 break;
1880
1881 case QUAL_UNION_TYPE:
1882 ada_size
1883 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1884 this_ada_size, ada_size);
1885 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1886 this_size, size);
1887 break;
1888
1889 case RECORD_TYPE:
1890 /* Since we know here that all fields are sorted in order of
1891 increasing bit position, the size of the record is one
1892 higher than the ending bit of the last field processed
1893 unless we have a rep clause, since in that case we might
1894 have a field outside a QUAL_UNION_TYPE that has a higher ending
1895 position. So use a MAX in that case. Also, if this field is a
1896 QUAL_UNION_TYPE, we need to take into account the previous size in
1897 the case of empty variants. */
1898 ada_size
1899 = merge_sizes (ada_size, pos, this_ada_size,
1900 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1901 size
1902 = merge_sizes (size, pos, this_size,
1903 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1904 break;
1905
1906 default:
1907 gcc_unreachable ();
1908 }
1909 }
1910
1911 if (code == QUAL_UNION_TYPE)
f9001da7 1912 nreverse (field_list);
27becfc8 1913
1914 if (rep_level < 2)
1915 {
1916 /* If this is a padding record, we never want to make the size smaller
1917 than what was specified in it, if any. */
a98f6bec 1918 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
27becfc8 1919 size = TYPE_SIZE (record_type);
1920
1921 /* Now set any of the values we've just computed that apply. */
a98f6bec 1922 if (!TYPE_FAT_POINTER_P (record_type)
27becfc8 1923 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1924 SET_TYPE_ADA_SIZE (record_type, ada_size);
1925
1926 if (rep_level > 0)
1927 {
1928 tree size_unit = had_size_unit
1929 ? TYPE_SIZE_UNIT (record_type)
1930 : convert (sizetype,
1931 size_binop (CEIL_DIV_EXPR, size,
1932 bitsize_unit_node));
1933 unsigned int align = TYPE_ALIGN (record_type);
1934
1935 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1936 TYPE_SIZE_UNIT (record_type)
1937 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1938
1939 compute_record_mode (record_type);
1940 }
1941 }
1942
4ca76485 1943 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1944 TYPE_MAX_ALIGN (record_type) = 0;
1945
f9001da7 1946 if (debug_info_p)
27becfc8 1947 rest_of_record_type_compilation (record_type);
1948}
1949
ba502e2b 1950/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1951 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1952 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1953 moment TYPE will get a context. */
ae16f71d 1954
1955void
1956add_parallel_type (tree type, tree parallel_type)
1957{
1958 tree decl = TYPE_STUB_DECL (type);
1959
1960 while (DECL_PARALLEL_TYPE (decl))
1961 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1962
1963 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
ba502e2b 1964
1965 /* If PARALLEL_TYPE already has a context, we are done. */
ea780bd9 1966 if (TYPE_CONTEXT (parallel_type))
ba502e2b 1967 return;
1968
ea780bd9 1969 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1970 it to PARALLEL_TYPE. */
1971 if (TYPE_CONTEXT (type))
ba502e2b 1972 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1973
ea780bd9 1974 /* Otherwise TYPE has no context yet. We know it will have one thanks to
1975 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1976 so we have nothing to do in this case. */
ae16f71d 1977}
1978
1979/* Return true if TYPE has a parallel type. */
1980
1981static bool
1982has_parallel_type (tree type)
1983{
1984 tree decl = TYPE_STUB_DECL (type);
1985
1986 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1987}
1988
baaf92dc 1989/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1990 associated with it. It need not be invoked directly in most cases as
1991 finish_record_type takes care of doing so. */
27becfc8 1992
1993void
1994rest_of_record_type_compilation (tree record_type)
1995{
27becfc8 1996 bool var_size = false;
96e7147a 1997 tree field;
27becfc8 1998
96e7147a 1999 /* If this is a padded type, the bulk of the debug info has already been
2000 generated for the field's type. */
2001 if (TYPE_IS_PADDING_P (record_type))
2002 return;
2003
ae16f71d 2004 /* If the type already has a parallel type (XVS type), then we're done. */
2005 if (has_parallel_type (record_type))
2006 return;
2007
96e7147a 2008 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
27becfc8 2009 {
2010 /* We need to make an XVE/XVU record if any field has variable size,
2011 whether or not the record does. For example, if we have a union,
2012 it may be that all fields, rounded up to the alignment, have the
2013 same size, in which case we'll use that size. But the debug
2014 output routines (except Dwarf2) won't be able to output the fields,
2015 so we need to make the special record. */
2016 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2017 /* If a field has a non-constant qualifier, the record will have
2018 variable size too. */
96e7147a 2019 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
27becfc8 2020 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2021 {
2022 var_size = true;
2023 break;
2024 }
2025 }
2026
96e7147a 2027 /* If this record type is of variable size, make a parallel record type that
2028 will tell the debugger how the former is laid out (see exp_dbug.ads). */
b20f41dd 2029 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
27becfc8 2030 {
2031 tree new_record_type
2032 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2033 ? UNION_TYPE : TREE_CODE (record_type));
48dcce25 2034 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
27becfc8 2035 tree last_pos = bitsize_zero_node;
e3698827 2036 tree old_field, prev_old_field = NULL_TREE;
27becfc8 2037
e3698827 2038 new_name
2039 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2040 ? "XVU" : "XVE");
2041 TYPE_NAME (new_record_type) = new_name;
5d4b30ea 2042 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
27becfc8 2043 TYPE_STUB_DECL (new_record_type)
e3698827 2044 = create_type_stub_decl (new_name, new_record_type);
27becfc8 2045 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2046 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
5be881ba 2047 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
27becfc8 2048 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2049 TYPE_SIZE_UNIT (new_record_type)
2050 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2051
819ab09a 2052 /* Now scan all the fields, replacing each field with a new field
2053 corresponding to the new encoding. */
27becfc8 2054 for (old_field = TYPE_FIELDS (record_type); old_field;
1767a056 2055 old_field = DECL_CHAIN (old_field))
27becfc8 2056 {
2057 tree field_type = TREE_TYPE (old_field);
2058 tree field_name = DECL_NAME (old_field);
42a9a9c6 2059 tree curpos = fold_bit_position (old_field);
819ab09a 2060 tree pos, new_field;
27becfc8 2061 bool var = false;
2062 unsigned int align = 0;
27becfc8 2063
819ab09a 2064 /* See how the position was modified from the last position.
27becfc8 2065
819ab09a 2066 There are two basic cases we support: a value was added
2067 to the last position or the last position was rounded to
2068 a boundary and then something was added. Check for the
2069 first case first. If not, see if there is any evidence
2070 of rounding. If so, round the last position and retry.
27becfc8 2071
819ab09a 2072 If this is a union, the position can be taken as zero. */
27becfc8 2073 if (TREE_CODE (new_record_type) == UNION_TYPE)
819ab09a 2074 pos = bitsize_zero_node;
27becfc8 2075 else
2076 pos = compute_related_constant (curpos, last_pos);
2077
819ab09a 2078 if (!pos
2079 && TREE_CODE (curpos) == MULT_EXPR
e913b5cd 2080 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
27becfc8 2081 {
2082 tree offset = TREE_OPERAND (curpos, 0);
e913b5cd 2083 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
819ab09a 2084 align = scale_by_factor_of (offset, align);
2085 last_pos = round_up (last_pos, align);
2086 pos = compute_related_constant (curpos, last_pos);
27becfc8 2087 }
819ab09a 2088 else if (!pos
2089 && TREE_CODE (curpos) == PLUS_EXPR
6b409616 2090 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
27becfc8 2091 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
6b409616 2092 && tree_fits_uhwi_p
2093 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
27becfc8 2094 {
819ab09a 2095 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2096 unsigned HOST_WIDE_INT addend
6b409616 2097 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
27becfc8 2098 align
6b409616 2099 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
819ab09a 2100 align = scale_by_factor_of (offset, align);
2101 align = MIN (align, addend & -addend);
2102 last_pos = round_up (last_pos, align);
2103 pos = compute_related_constant (curpos, last_pos);
27becfc8 2104 }
819ab09a 2105 else if (potential_alignment_gap (prev_old_field, old_field, pos))
27becfc8 2106 {
2107 align = TYPE_ALIGN (field_type);
819ab09a 2108 last_pos = round_up (last_pos, align);
2109 pos = compute_related_constant (curpos, last_pos);
27becfc8 2110 }
2111
2112 /* If we can't compute a position, set it to zero.
2113
819ab09a 2114 ??? We really should abort here, but it's too much work
2115 to get this correct for all cases. */
27becfc8 2116 if (!pos)
2117 pos = bitsize_zero_node;
2118
2119 /* See if this type is variable-sized and make a pointer type
2120 and indicate the indirection if so. Beware that the debug
2121 back-end may adjust the position computed above according
2122 to the alignment of the field type, i.e. the pointer type
2123 in this case, if we don't preventively counter that. */
2124 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2125 {
2126 field_type = build_pointer_type (field_type);
2127 if (align != 0 && TYPE_ALIGN (field_type) > align)
2128 {
baaf92dc 2129 field_type = copy_type (field_type);
5d4b30ea 2130 SET_TYPE_ALIGN (field_type, align);
27becfc8 2131 }
2132 var = true;
2133 }
2134
2135 /* Make a new field name, if necessary. */
2136 if (var || align != 0)
2137 {
2138 char suffix[16];
2139
2140 if (align != 0)
2141 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2142 align / BITS_PER_UNIT);
2143 else
2144 strcpy (suffix, "XVL");
2145
e3698827 2146 field_name = concat_name (field_name, suffix);
27becfc8 2147 }
2148
d51eba1a 2149 new_field
2150 = create_field_decl (field_name, field_type, new_record_type,
2151 DECL_SIZE (old_field), pos, 0, 0);
1767a056 2152 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
27becfc8 2153 TYPE_FIELDS (new_record_type) = new_field;
2154
2155 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2156 zero. The only time it's not the last field of the record
2157 is when there are other components at fixed positions after
2158 it (meaning there was a rep clause for every field) and we
2159 want to be able to encode them. */
42a9a9c6 2160 last_pos = size_binop (PLUS_EXPR, curpos,
27becfc8 2161 (TREE_CODE (TREE_TYPE (old_field))
2162 == QUAL_UNION_TYPE)
2163 ? bitsize_zero_node
2164 : DECL_SIZE (old_field));
2165 prev_old_field = old_field;
2166 }
2167
96e7147a 2168 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
27becfc8 2169
ae16f71d 2170 add_parallel_type (record_type, new_record_type);
27becfc8 2171 }
27becfc8 2172}
2173
27becfc8 2174/* Utility function of above to merge LAST_SIZE, the previous size of a record
32826d65 2175 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2176 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2177 replace a value of zero with the old size. If HAS_REP is true, we take the
2178 MAX of the end position of this field with LAST_SIZE. In all other cases,
2179 we use FIRST_BIT plus SIZE. Return an expression for the size. */
27becfc8 2180
2181static tree
2182merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2183 bool has_rep)
2184{
2185 tree type = TREE_TYPE (last_size);
c88e6a4f 2186 tree new_size;
27becfc8 2187
2188 if (!special || TREE_CODE (size) != COND_EXPR)
2189 {
c88e6a4f 2190 new_size = size_binop (PLUS_EXPR, first_bit, size);
27becfc8 2191 if (has_rep)
c88e6a4f 2192 new_size = size_binop (MAX_EXPR, last_size, new_size);
27becfc8 2193 }
2194
2195 else
c88e6a4f 2196 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2197 integer_zerop (TREE_OPERAND (size, 1))
2198 ? last_size : merge_sizes (last_size, first_bit,
2199 TREE_OPERAND (size, 1),
2200 1, has_rep),
2201 integer_zerop (TREE_OPERAND (size, 2))
2202 ? last_size : merge_sizes (last_size, first_bit,
2203 TREE_OPERAND (size, 2),
2204 1, has_rep));
27becfc8 2205
2206 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2207 when fed through substitute_in_expr) into thinking that a constant
2208 size is not constant. */
c88e6a4f 2209 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2210 new_size = TREE_OPERAND (new_size, 0);
27becfc8 2211
c88e6a4f 2212 return new_size;
27becfc8 2213}
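/* For illustration: with HAS_REP false, a field at FIRST_BIT 32 with SIZE 16
   yields a new size of 32 + 16 = 48 bits regardless of LAST_SIZE; with
   HAS_REP true the result is MAX (LAST_SIZE, 48), since a rep clause may
   place a field that ends beyond a later one.  */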
2214
42a9a9c6 2215/* Return the bit position of FIELD, in bits from the start of the record,
2216 and fold it as much as possible. This is a tree of type bitsizetype. */
2217
2218static tree
2219fold_bit_position (const_tree field)
2220{
2221 tree offset = DECL_FIELD_OFFSET (field);
2222 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2223 offset = size_binop (TREE_CODE (offset),
2224 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2225 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2226 else
2227 offset = fold_convert (bitsizetype, offset);
2228 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2229 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2230}
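/* For illustration: on a target with BITS_PER_UNIT == 8, a field whose
   DECL_FIELD_OFFSET is 4 and whose DECL_FIELD_BIT_OFFSET is 3 folds to the
   bitsizetype constant 4 * 8 + 3 = 35.  */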
2231
27becfc8 2232/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2233 related by the addition of a constant. Return that constant if so. */
2234
2235static tree
2236compute_related_constant (tree op0, tree op1)
2237{
42a9a9c6 2238 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
27becfc8 2239
42a9a9c6 2240 if (TREE_CODE (op0) == MULT_EXPR
2241 && TREE_CODE (op1) == MULT_EXPR
2242 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2243 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2244 {
2245 factor = TREE_OPERAND (op0, 1);
2246 op0 = TREE_OPERAND (op0, 0);
2247 op1 = TREE_OPERAND (op1, 0);
2248 }
27becfc8 2249 else
42a9a9c6 2250 factor = NULL_TREE;
2251
2252 op0_cst = split_plus (op0, &op0_var);
2253 op1_cst = split_plus (op1, &op1_var);
2254 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2255
2256 if (operand_equal_p (op0_var, op1_var, 0))
2257 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2258
2259 return NULL_TREE;
27becfc8 2260}
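/* For illustration: if OP0 is N + 16 and OP1 is N + 8, the variable parts
   match and the constant 8 is returned; if OP0 is (N + 2) * 8 and OP1 is
   N * 8, the common factor 8 is stripped and 2 * 8 = 16 is returned; if
   the variable parts differ, NULL_TREE is returned.  */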
2261
2262/* Utility function of above to split a tree OP which may be a sum, into a
2263 constant part, which is returned, and a variable part, which is stored
2264 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2265 bitsizetype. */
2266
2267static tree
2268split_plus (tree in, tree *pvar)
2269{
7ad4f11d 2270 /* Strip conversions in order to ease the tree traversal and maximize the
2271 potential for constant or plus/minus discovery. We need to be careful
27becfc8 2272 to always return and set *pvar to bitsizetype trees, but it's worth
2273 the effort. */
7ad4f11d 2274 in = remove_conversions (in, false);
27becfc8 2275
2276 *pvar = convert (bitsizetype, in);
2277
2278 if (TREE_CODE (in) == INTEGER_CST)
2279 {
2280 *pvar = bitsize_zero_node;
2281 return convert (bitsizetype, in);
2282 }
2283 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2284 {
2285 tree lhs_var, rhs_var;
2286 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2287 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2288
2289 if (lhs_var == TREE_OPERAND (in, 0)
2290 && rhs_var == TREE_OPERAND (in, 1))
2291 return bitsize_zero_node;
2292
2293 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2294 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2295 }
2296 else
2297 return bitsize_zero_node;
2298}
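/* For illustration: for IN = V + 16 this returns the bitsizetype constant 16
   and sets *PVAR to V; for a plain INTEGER_CST it returns the constant and
   sets *PVAR to bitsize_zero_node; for any other expression it returns
   bitsize_zero_node and leaves the whole expression in *PVAR.  */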
2299\f
27becfc8 2300/* Return a copy of TYPE but safe to modify in any way. */
2301
2302tree
2303copy_type (tree type)
2304{
c88e6a4f 2305 tree new_type = copy_node (type);
27becfc8 2306
170d361e 2307 /* Unshare the language-specific data. */
2308 if (TYPE_LANG_SPECIFIC (type))
2309 {
2310 TYPE_LANG_SPECIFIC (new_type) = NULL;
2311 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2312 }
2313
2314 /* And the contents of the language-specific slot if needed. */
2315 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2316 && TYPE_RM_VALUES (type))
2317 {
2318 TYPE_RM_VALUES (new_type) = NULL_TREE;
2319 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2320 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2321 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2322 }
2323
27becfc8 2324 /* copy_node clears this field instead of copying it, because it is
2325 aliased with TREE_CHAIN. */
c88e6a4f 2326 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
27becfc8 2327
baaf92dc 2328 TYPE_POINTER_TO (new_type) = NULL_TREE;
2329 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
c88e6a4f 2330 TYPE_MAIN_VARIANT (new_type) = new_type;
baaf92dc 2331 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
17eb96ba 2332 TYPE_CANONICAL (new_type) = new_type;
27becfc8 2333
c88e6a4f 2334 return new_type;
27becfc8 2335}
2336\f
211df513 2337/* Return a subtype of sizetype with range MIN to MAX and whose
2338 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2339 of the associated TYPE_DECL. */
27becfc8 2340
2341tree
2342create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2343{
2344 /* First build a type for the desired range. */
47c154d9 2345 tree type = build_nonshared_range_type (sizetype, min, max);
27becfc8 2346
47c154d9 2347 /* Then set the index type. */
27becfc8 2348 SET_TYPE_INDEX_TYPE (type, index);
081f18cf 2349 create_type_decl (NULL_TREE, type, true, false, gnat_node);
211df513 2350
27becfc8 2351 return type;
2352}
a9538d68 2353
2354/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2355 sizetype is used. */
2356
2357tree
2358create_range_type (tree type, tree min, tree max)
2359{
2360 tree range_type;
2361
ea780bd9 2362 if (!type)
a9538d68 2363 type = sizetype;
2364
2365 /* First build a type with the base range. */
47c154d9 2366 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2367 TYPE_MAX_VALUE (type));
a9538d68 2368
2369 /* Then set the actual range. */
6fa4bf38 2370 SET_TYPE_RM_MIN_VALUE (range_type, min);
2371 SET_TYPE_RM_MAX_VALUE (range_type, max);
a9538d68 2372
2373 return range_type;
2374}
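/* For illustration: create_range_type (sizetype, size_int (1), size_int (10))
   yields a subtype whose base bounds are those of sizetype but whose RM
   bounds, as seen through TYPE_RM_MIN_VALUE and TYPE_RM_MAX_VALUE, are
   1 and 10.  */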
27becfc8 2375\f
7a0ae4af 2376/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2377 NAME gives the name of the type to be used in the declaration. */
515c6c6c 2378
2379tree
7a0ae4af 2380create_type_stub_decl (tree name, tree type)
515c6c6c 2381{
7a0ae4af 2382 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
515c6c6c 2383 DECL_ARTIFICIAL (type_decl) = 1;
5bf971ee 2384 TYPE_ARTIFICIAL (type) = 1;
515c6c6c 2385 return type_decl;
2386}
2387
7a0ae4af 2388/* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2389 used in the declaration. ARTIFICIAL_P is true if the declaration was
2390 generated by the compiler. DEBUG_INFO_P is true if we need to write
2391 debug information about this type. GNAT_NODE is used for the position
2392 of the decl. */
27becfc8 2393
2394tree
7a0ae4af 2395create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2396 Node_Id gnat_node)
27becfc8 2397{
27becfc8 2398 enum tree_code code = TREE_CODE (type);
7a0ae4af 2399 bool is_named
2400 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
515c6c6c 2401 tree type_decl;
27becfc8 2402
515c6c6c 2403 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2404 gcc_assert (!TYPE_IS_DUMMY_P (type));
27becfc8 2405
515c6c6c 2406 /* If the type hasn't been named yet, we're naming it; preserve an existing
2407 TYPE_STUB_DECL that has been attached to it for some purpose. */
7a0ae4af 2408 if (!is_named && TYPE_STUB_DECL (type))
515c6c6c 2409 {
2410 type_decl = TYPE_STUB_DECL (type);
7a0ae4af 2411 DECL_NAME (type_decl) = name;
515c6c6c 2412 }
2413 else
7a0ae4af 2414 type_decl = build_decl (input_location, TYPE_DECL, name, type);
27becfc8 2415
515c6c6c 2416 DECL_ARTIFICIAL (type_decl) = artificial_p;
5bf971ee 2417 TYPE_ARTIFICIAL (type) = artificial_p;
ac45dde2 2418
2419 /* Add this decl to the current binding level. */
515c6c6c 2420 gnat_pushdecl (type_decl, gnat_node);
ac45dde2 2421
a10d3a24 2422 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2423 causes the name to be also viewed as a "tag" by the debug back-end, with
2424 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2425 types in DWARF.
2426
2427 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2428 from multiple contexts, and "type_decl" references a copy of it: in such a
2429 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2430 with the mechanism above. */
7a0ae4af 2431 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
515c6c6c 2432 TYPE_STUB_DECL (type) = type_decl;
2433
3fb9f0d4 2434 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2435 back-end doesn't support, and for others if we don't need to. */
27becfc8 2436 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2437 DECL_IGNORED_P (type_decl) = 1;
27becfc8 2438
2439 return type_decl;
2440}
515c6c6c 2441\f
27becfc8 2442/* Return a VAR_DECL or CONST_DECL node.
2443
7a0ae4af 2444 NAME gives the name of the variable. ASM_NAME is its assembler name
2445 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
27becfc8 2446 the GCC tree for an optional initial expression; NULL_TREE if none.
2447
2448 CONST_FLAG is true if this variable is constant, in which case we might
2449 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2450
2451 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2452 definition to be made visible outside of the current compilation unit, for
2453 instance variable definitions in a package specification.
2454
32826d65 2455 EXTERN_FLAG is true when processing an external variable declaration (as
27becfc8 2456 opposed to a definition: no storage is to be allocated for the variable).
2457
c0df8418 2458 STATIC_FLAG is only relevant when not at top level and indicates whether
2459 to always allocate storage to the variable.
2460
2461 VOLATILE_FLAG is true if this variable is declared as volatile.
27becfc8 2462
4905002b 2463 ARTIFICIAL_P is true if the variable was generated by the compiler.
2464
2465 DEBUG_INFO_P is true if we need to write debug information for it.
2466
c4e63392 2467 ATTR_LIST is the list of attributes to be attached to the variable.
2468
27becfc8 2469 GNAT_NODE is used for the position of the decl. */
2470
2471tree
7a0ae4af 2472create_var_decl (tree name, tree asm_name, tree type, tree init,
2473 bool const_flag, bool public_flag, bool extern_flag,
c0df8418 2474 bool static_flag, bool volatile_flag, bool artificial_p,
2475 bool debug_info_p, struct attrib *attr_list,
2476 Node_Id gnat_node, bool const_decl_allowed_p)
27becfc8 2477{
7464361a 2478 /* Whether the object has static storage duration, either explicitly or by
2479 virtue of being declared at the global level. */
2480 const bool static_storage = static_flag || global_bindings_p ();
2481
2482 /* Whether the initializer is constant: for an external object or an object
2483 with static storage duration, we check that the initializer is a valid
2484 constant expression for initializing a static variable; otherwise, we
2485 only check that it is constant. */
2486 const bool init_const
7a0ae4af 2487 = (init
2488 && gnat_types_compatible_p (type, TREE_TYPE (init))
7464361a 2489 && (extern_flag || static_storage
7a0ae4af 2490 ? initializer_constant_valid_p (init, TREE_TYPE (init))
7464361a 2491 != NULL_TREE
7a0ae4af 2492 : TREE_CONSTANT (init)));
27becfc8 2493
2494 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
7464361a 2495 case the initializer may be used in lieu of the DECL node (as done in
27becfc8 2496 Identifier_to_gnu). This is useful to prevent the need of elaboration
7464361a 2497 code when an identifier for which such a DECL is made is in turn used
2498 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2499 but extra constraints apply to this choice (see below) and they are not
2500 relevant to the distinction we wish to make. */
2501 const bool constant_p = const_flag && init_const;
27becfc8 2502
2503 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2504 and may be used for scalars in general but not for aggregates. */
2505 tree var_decl
f3c9930a 2506 = build_decl (input_location,
78f7c4e5 2507 (constant_p
2508 && const_decl_allowed_p
2509 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
7a0ae4af 2510 name, type);
27becfc8 2511
95164e71 2512 /* Detect constants created by the front-end to hold 'reference to function
2513 calls for stabilization purposes. This is needed for renaming. */
2514 if (const_flag && init && POINTER_TYPE_P (type))
2515 {
2516 tree inner = init;
2517 if (TREE_CODE (inner) == COMPOUND_EXPR)
2518 inner = TREE_OPERAND (inner, 1);
2519 inner = remove_conversions (inner, true);
2520 if (TREE_CODE (inner) == ADDR_EXPR
2521 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2522 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2523 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2524 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2525 DECL_RETURN_VALUE_P (var_decl) = 1;
2526 }
2527
27becfc8 2528 /* If this is external, throw away any initializations (they will be done
2529 elsewhere) unless this is a constant for which we would like to remain
2530 able to get the initializer. If we are defining a global here, leave a
2531 constant initialization and save any variable elaborations for the
2532 elaboration routine. If we are just annotating types, throw away the
2533 initialization if it isn't a constant. */
0dfdb37a 2534 if ((extern_flag && !constant_p)
7a0ae4af 2535 || (type_annotate_only && init && !TREE_CONSTANT (init)))
0dfdb37a 2536 init = NULL_TREE;
27becfc8 2537
7464361a 2538 /* At the global level, a non-constant initializer generates elaboration
2539 statements. Check that such statements are allowed, that is to say,
2540 not violating a No_Elaboration_Code restriction. */
7a0ae4af 2541 if (init && !init_const && global_bindings_p ())
27becfc8 2542 Check_Elaboration_Code_Allowed (gnat_node);
e4f9d5d8 2543
4905002b 2544 /* Attach the initializer, if any. */
7a0ae4af 2545 DECL_INITIAL (var_decl) = init;
4905002b 2546
2547 /* Directly set some flags. */
2548 DECL_ARTIFICIAL (var_decl) = artificial_p;
e2b35abe 2549 DECL_EXTERNAL (var_decl) = extern_flag;
27becfc8 2550
17e5248e 2551 TREE_CONSTANT (var_decl) = constant_p;
2552 TREE_READONLY (var_decl) = const_flag;
2553
2554 /* The object is public if it is external or if it is declared public
2555 and has static storage duration. */
2556 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2557
2558 /* We need to allocate static storage for an object with static storage
2559 duration if it isn't external. */
2560 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2561
2562 TREE_SIDE_EFFECTS (var_decl)
2563 = TREE_THIS_VOLATILE (var_decl)
2564 = TYPE_VOLATILE (type) | volatile_flag;
2565
2566 if (TREE_SIDE_EFFECTS (var_decl))
2567 TREE_ADDRESSABLE (var_decl) = 1;
2568
27becfc8 2569 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2570 try to fiddle with DECL_COMMON. However, on platforms that don't
2571 support global BSS sections, uninitialized global variables would
2572 go in DATA instead, thus increasing the size of the executable. */
2573 if (!flag_no_common
2574 && TREE_CODE (var_decl) == VAR_DECL
e4f9d5d8 2575 && TREE_PUBLIC (var_decl)
27becfc8 2576 && !have_global_bss_p ())
2577 DECL_COMMON (var_decl) = 1;
27becfc8 2578
4905002b 2579 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2580 since we will create an associated variable. Likewise for an external
2581 constant whose initializer is not absolute, because this would mean a
2582 global relocation in a read-only section which runs afoul of the PE-COFF
2583 run-time relocation mechanism. */
2584 if (!debug_info_p
2585 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2586 || (extern_flag
2587 && constant_p
7a0ae4af 2588 && init
2589 && initializer_constant_valid_p (init, TREE_TYPE (init))
4905002b 2590 != null_pointer_node))
477305bf 2591 DECL_IGNORED_P (var_decl) = 1;
2592
081f18cf 2593 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2594 if (TREE_CODE (var_decl) == VAR_DECL)
2595 process_attributes (&var_decl, &attr_list, true, gnat_node);
2596
2597 /* Add this decl to the current binding level. */
2598 gnat_pushdecl (var_decl, gnat_node);
2599
0c6fd2e5 2600 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
27becfc8 2601 {
0c6fd2e5 2602 /* Let the target mangle the name if this isn't a verbatim asm. */
2603 if (*IDENTIFIER_POINTER (asm_name) != '*')
2604 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
081f18cf 2605
0c6fd2e5 2606 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
27becfc8 2607 }
27becfc8 2608
2609 return var_decl;
2610}
2611\f
2612/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2613
2614static bool
2615aggregate_type_contains_array_p (tree type)
2616{
2617 switch (TREE_CODE (type))
2618 {
2619 case RECORD_TYPE:
2620 case UNION_TYPE:
2621 case QUAL_UNION_TYPE:
2622 {
2623 tree field;
1767a056 2624 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
27becfc8 2625 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2626 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2627 return true;
2628 return false;
2629 }
2630
2631 case ARRAY_TYPE:
2632 return true;
2633
2634 default:
2635 gcc_unreachable ();
2636 }
2637}
2638
7a0ae4af 2639/* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2640 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2641 is the specified size of the field. If POS is nonzero, it is the bit
2642 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2643 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
81c30835 2644 means we are allowed to take the address of the field; if it is negative,
2645 we should not make a bitfield, which is used by make_aligning_type. */
27becfc8 2646
2647tree
7a0ae4af 2648create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2649 int packed, int addressable)
27becfc8 2650{
7a0ae4af 2651 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
27becfc8 2652
2653 DECL_CONTEXT (field_decl) = record_type;
7a0ae4af 2654 TREE_READONLY (field_decl) = TYPE_READONLY (type);
27becfc8 2655
2656 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2657 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2658 Likewise for an aggregate without specified position that contains an
2659 array, because in this case slices of variable length of this array
2660 must be handled by GCC and variable-sized objects need to be aligned
2661 to at least a byte boundary. */
7a0ae4af 2662 if (packed && (TYPE_MODE (type) == BLKmode
27becfc8 2663 || (!pos
7a0ae4af 2664 && AGGREGATE_TYPE_P (type)
2665 && aggregate_type_contains_array_p (type))))
5d4b30ea 2666 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
27becfc8 2667
2668 /* If a size is specified, use it. Otherwise, if the record type is packed
2669 compute a size to use, which may differ from the object's natural size.
2670 We always set a size in this case to trigger the checks for bitfield
2671 creation below, which is typically required when no position has been
2672 specified. */
2673 if (size)
2674 size = convert (bitsizetype, size);
2675 else if (packed == 1)
2676 {
7a0ae4af 2677 size = rm_size (type);
2678 if (TYPE_MODE (type) == BLKmode)
81c30835 2679 size = round_up (size, BITS_PER_UNIT);
27becfc8 2680 }
2681
2682 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2683 specified for two reasons: first, if the size differs from the natural
2684 size; second, if the alignment is insufficient. There are a number of
2685 ways the latter can be true.
2686
2687 We never make a bitfield if the type of the field has a nonconstant size,
2688 because no such entity requiring bitfield operations should reach here.
2689
2690 We do *preventively* make a bitfield when there might be the need for it
2691 but we don't have all the necessary information to decide, as is the case
2692 of a field with no specified position in a packed record.
2693
2694 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2695 in layout_decl or finish_record_type to clear the bit_field indication if
2696 it is in fact not needed. */
2697 if (addressable >= 0
2698 && size
2699 && TREE_CODE (size) == INTEGER_CST
7a0ae4af 2700 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2701 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2702 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
27becfc8 2703 || packed
2704 || (TYPE_ALIGN (record_type) != 0
7a0ae4af 2705 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
27becfc8 2706 {
2707 DECL_BIT_FIELD (field_decl) = 1;
2708 DECL_SIZE (field_decl) = size;
2709 if (!packed && !pos)
4880a940 2710 {
2711 if (TYPE_ALIGN (record_type) != 0
7a0ae4af 2712 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
5d4b30ea 2713 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
4880a940 2714 else
5d4b30ea 2715 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
4880a940 2716 }
27becfc8 2717 }
2718
2719 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2720
2721 /* Bump the alignment if need be, either for bitfield/packing purposes or
2722 to satisfy the type requirements if no such consideration applies. When
2723 we get the alignment from the type, indicate if this is from an explicit
2724 user request, which prevents stor-layout from lowering it later on. */
2725 {
7cfdc2f0 2726 unsigned int bit_align
27becfc8 2727 = (DECL_BIT_FIELD (field_decl) ? 1
7a0ae4af 2728 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
27becfc8 2729
2730 if (bit_align > DECL_ALIGN (field_decl))
5d4b30ea 2731 SET_DECL_ALIGN (field_decl, bit_align);
7a0ae4af 2732 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
27becfc8 2733 {
5d4b30ea 2734 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
7a0ae4af 2735 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
27becfc8 2736 }
2737 }
2738
2739 if (pos)
2740 {
2741 /* We need to pass in the alignment the DECL is known to have.
2742 This is the lowest-order bit set in POS, but no more than
2743 the alignment of the record, if one is specified. Note
2744 that an alignment of 0 is taken as infinite. */
2745 unsigned int known_align;
2746
e913b5cd 2747 if (tree_fits_uhwi_p (pos))
2748 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
27becfc8 2749 else
2750 known_align = BITS_PER_UNIT;
2751
2752 if (TYPE_ALIGN (record_type)
2753 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2754 known_align = TYPE_ALIGN (record_type);
2755
2756 layout_decl (field_decl, known_align);
2757 SET_DECL_OFFSET_ALIGN (field_decl,
e913b5cd 2758 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
27becfc8 2759 : BITS_PER_UNIT);
2760 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2761 &DECL_FIELD_BIT_OFFSET (field_decl),
2762 DECL_OFFSET_ALIGN (field_decl), pos);
27becfc8 2763 }
2764
2765 /* In addition to what our caller says, claim the field is addressable if we
2766 know that its type is not suitable.
2767
2768 The field may also be "technically" nonaddressable, meaning that even if
2769 we attempt to take the field's address we will actually get the address
2770 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2771 value we have at this point is not accurate enough, so we don't account
2772 for this here and let finish_record_type decide. */
7a0ae4af 2773 if (!addressable && !type_for_nonaliased_component_p (type))
27becfc8 2774 addressable = 1;
2775
2776 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2777
2778 return field_decl;
2779}
2780\f
6a1231a5 2781/* Return a PARM_DECL node with NAME and TYPE. */
27becfc8 2782
2783tree
6a1231a5 2784create_param_decl (tree name, tree type)
27becfc8 2785{
7a0ae4af 2786 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
27becfc8 2787
a002cb99 2788 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2789 can lead to various ABI violations. */
2790 if (targetm.calls.promote_prototypes (NULL_TREE)
7a0ae4af 2791 && INTEGRAL_TYPE_P (type)
2792 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
27becfc8 2793 {
2794 /* We have to be careful about biased types here. Make a subtype
2795 of integer_type_node with the proper biasing. */
7a0ae4af 2796 if (TREE_CODE (type) == INTEGER_TYPE
2797 && TYPE_BIASED_REPRESENTATION_P (type))
27becfc8 2798 {
a9538d68 2799 tree subtype
2800 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
211df513 2801 TREE_TYPE (subtype) = integer_type_node;
2802 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
7a0ae4af 2803 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2804 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2805 type = subtype;
27becfc8 2806 }
2807 else
7a0ae4af 2808 type = integer_type_node;
27becfc8 2809 }
2810
7a0ae4af 2811 DECL_ARG_TYPE (param_decl) = type;
27becfc8 2812 return param_decl;
2813}
2814\f
081f18cf 2815/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2816 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2817 changed. GNAT_NODE is used for the position of error messages. */
27becfc8 2818
081f18cf 2819void
2820process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2821 Node_Id gnat_node)
27becfc8 2822{
081f18cf 2823 struct attrib *attr;
2824
2825 for (attr = *attr_list; attr; attr = attr->next)
2826 switch (attr->type)
27becfc8 2827 {
2828 case ATTR_MACHINE_ATTRIBUTE:
081f18cf 2829 Sloc_to_locus (Sloc (gnat_node), &input_location);
2830 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2831 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
27becfc8 2832 break;
2833
2834 case ATTR_LINK_ALIAS:
081f18cf 2835 if (!DECL_EXTERNAL (*node))
27becfc8 2836 {
081f18cf 2837 TREE_STATIC (*node) = 1;
2838 assemble_alias (*node, attr->name);
27becfc8 2839 }
2840 break;
2841
2842 case ATTR_WEAK_EXTERNAL:
2843 if (SUPPORTS_WEAK)
081f18cf 2844 declare_weak (*node);
27becfc8 2845 else
2846 post_error ("?weak declarations not supported on this target",
081f18cf 2847 attr->error_point);
27becfc8 2848 break;
2849
2850 case ATTR_LINK_SECTION:
218e3e4e 2851 if (targetm_common.have_named_sections)
27becfc8 2852 {
1a3c0b14 2853 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
081f18cf 2854 DECL_COMMON (*node) = 0;
27becfc8 2855 }
2856 else
2857 post_error ("?section attributes are not supported for this target",
081f18cf 2858 attr->error_point);
27becfc8 2859 break;
2860
2861 case ATTR_LINK_CONSTRUCTOR:
081f18cf 2862 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2863 TREE_USED (*node) = 1;
27becfc8 2864 break;
2865
2866 case ATTR_LINK_DESTRUCTOR:
081f18cf 2867 DECL_STATIC_DESTRUCTOR (*node) = 1;
2868 TREE_USED (*node) = 1;
27becfc8 2869 break;
90d3e56e 2870
2871 case ATTR_THREAD_LOCAL_STORAGE:
5e68df57 2872 set_decl_tls_model (*node, decl_default_tls_model (*node));
081f18cf 2873 DECL_COMMON (*node) = 0;
90d3e56e 2874 break;
27becfc8 2875 }
081f18cf 2876
2877 *attr_list = NULL;
27becfc8 2878}
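
For reference, the ATTR_MACHINE_ATTRIBUTE case funnels into the same decl_attributes machinery that the C front-end uses for GCC attribute syntax, so its effect is roughly that of the usual attribute notation. A hypothetical example (the declaration and section name are made up for illustration):

/* Rough C-level counterpart of what ends up going through decl_attributes
   above; not taken from this file.  */
int counter __attribute__ ((used, section (".persistent")));
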
27becfc8 2879
2880/* Return true if VALUE is known to be a multiple of FACTOR, which must be
2881 a power of 2. */
2882
2883bool
2884value_factor_p (tree value, HOST_WIDE_INT factor)
2885{
e913b5cd 2886 if (tree_fits_uhwi_p (value))
2887 return tree_to_uhwi (value) % factor == 0;
27becfc8 2888
2889 if (TREE_CODE (value) == MULT_EXPR)
2890 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2891 || value_factor_p (TREE_OPERAND (value, 1), factor));
2892
2893 return false;
2894}
2895
11f3f0ba 2896/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2897 from the parameter association for the instantiation of a generic. We do
2898 not want to emit source locations for them: the code generated for their
2899 initialization is likely to disturb debugging. */
2900
2901bool
2902renaming_from_generic_instantiation_p (Node_Id gnat_node)
2903{
2904 if (Nkind (gnat_node) != N_Defining_Identifier
2905 || !IN (Ekind (gnat_node), Object_Kind)
2906 || Comes_From_Source (gnat_node)
2907 || !Present (Renamed_Object (gnat_node)))
2908 return false;
2909
2910 /* Get the object declaration of the renamed object, if any and if the
2911 renamed object is a mere identifier. */
2912 gnat_node = Renamed_Object (gnat_node);
2913 if (Nkind (gnat_node) != N_Identifier)
2914 return false;
2915
2916 gnat_node = Entity (gnat_node);
2917 if (!Present (Parent (gnat_node)))
2918 return false;
2919
2920 gnat_node = Parent (gnat_node);
2921 return
2922 (Present (gnat_node)
2923 && Nkind (gnat_node) == N_Object_Declaration
2924 && Present (Corresponding_Generic_Association (gnat_node)));
2925}
2926
810e94f8 2927/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2928 feed it with the elaboration of GNAT_SCOPE. */
2929
2930static struct deferred_decl_context_node *
2931add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2932{
2933 struct deferred_decl_context_node *new_node;
2934
2935 new_node
2936 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2937 new_node->decl = decl;
2938 new_node->gnat_scope = gnat_scope;
2939 new_node->force_global = force_global;
2940 new_node->types.create (1);
2941 new_node->next = deferred_decl_context_queue;
2942 deferred_decl_context_queue = new_node;
2943 return new_node;
2944}
2945
2946/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2947 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2948 computed. */
2949
2950static void
2951add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2952{
2953 n->types.safe_push (type);
2954}
2955
2956/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2957 NULL_TREE if it is not available. */
2958
2959static tree
2960compute_deferred_decl_context (Entity_Id gnat_scope)
2961{
2962 tree context;
2963
2964 if (present_gnu_tree (gnat_scope))
2965 context = get_gnu_tree (gnat_scope);
2966 else
2967 return NULL_TREE;
2968
2969 if (TREE_CODE (context) == TYPE_DECL)
2970 {
2971 const tree context_type = TREE_TYPE (context);
2972
2973 /* Skip dummy types: only the final ones can appear in the context
2974 chain. */
2975 if (TYPE_DUMMY_P (context_type))
2976 return NULL_TREE;
2977
2978 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2979 chain. */
2980 else
2981 context = context_type;
2982 }
2983
2984 return context;
2985}
2986
2987/* Try to process all deferred nodes in the queue. Keep in the queue the ones
2988 that cannot be processed yet, remove the other ones. If FORCE is true,
2989 force the processing of all nodes and use the global context when nodes
2990 don't have a GNU translation. */
2991
2992void
2993process_deferred_decl_context (bool force)
2994{
2995 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2996 struct deferred_decl_context_node *node;
2997
c7083650 2998 while (*it)
810e94f8 2999 {
3000 bool processed = false;
3001 tree context = NULL_TREE;
3002 Entity_Id gnat_scope;
3003
3004 node = *it;
3005
c7083650 3006 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
810e94f8 3007 get the first scope. */
3008 gnat_scope = node->gnat_scope;
3009 while (Present (gnat_scope))
3010 {
3011 context = compute_deferred_decl_context (gnat_scope);
ea780bd9 3012 if (!force || context)
810e94f8 3013 break;
3014 gnat_scope = get_debug_scope (gnat_scope, NULL);
3015 }
3016
3017 /* Imported declarations must not be in a local context (i.e. not inside
3018 a function). */
ea780bd9 3019 if (context && node->force_global > 0)
810e94f8 3020 {
3021 tree ctx = context;
3022
ea780bd9 3023 while (ctx)
810e94f8 3024 {
3025 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
ea780bd9 3026 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
810e94f8 3027 }
3028 }
3029
3030 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3031 was no elaborated scope, use the global context. */
ea780bd9 3032 if (force && !context)
810e94f8 3033 context = get_global_context ();
3034
ea780bd9 3035 if (context)
810e94f8 3036 {
3037 tree t;
3038 int i;
3039
3040 DECL_CONTEXT (node->decl) = context;
3041
3042 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3043 ..._TYPE nodes. */
3044 FOR_EACH_VEC_ELT (node->types, i, t)
3045 {
ba502e2b 3046 gnat_set_type_context (t, context);
810e94f8 3047 }
3048 processed = true;
3049 }
3050
3051 /* If this node has been successfully processed, remove it from the
3052 queue. Then move to the next node. */
3053 if (processed)
3054 {
3055 *it = node->next;
3056 node->types.release ();
3057 free (node);
3058 }
3059 else
3060 it = &node->next;
3061 }
3062}
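
The loop above uses the classic pointer-to-pointer worklist idiom to unlink processed entries without tracking a previous node. A self-contained sketch of just that idiom (hypothetical node type, not the GNAT one, and no freeing shown):

#include <stdbool.h>

struct work_item
{
  struct work_item *next;
  bool processed;
};

/* Remove processed items in place; *IT always designates the link that
   holds the current item, so unlinking is a single store.  */
static void
drain_work_list (struct work_item **head)
{
  struct work_item **it = head;

  while (*it)
    {
      struct work_item *item = *it;

      if (item->processed)
        *it = item->next;     /* unlink ITEM, do not advance */
      else
        it = &item->next;     /* keep ITEM, advance the cursor */
    }
}
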
3063
819ab09a 3064/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3065
3066static unsigned int
3067scale_by_factor_of (tree expr, unsigned int value)
3068{
4a53d91c 3069 unsigned HOST_WIDE_INT addend = 0;
3070 unsigned HOST_WIDE_INT factor = 1;
3071
3072 /* Peel conversions around EXPR and try to extract bodies from function
3073 calls: it is possible to get the scale factor from size functions. */
819ab09a 3074 expr = remove_conversions (expr, true);
4a53d91c 3075 if (TREE_CODE (expr) == CALL_EXPR)
3076 expr = maybe_inline_call_in_expr (expr);
3077
3078 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3079 multiple of the scale factor we are looking for. */
3080 if (TREE_CODE (expr) == PLUS_EXPR
3081 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3082 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3083 {
3084 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3085 expr = TREE_OPERAND (expr, 0);
3086 }
819ab09a 3087
3088 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3089 corresponding to the number of trailing zeros of the mask. */
3090 if (TREE_CODE (expr) == BIT_AND_EXPR
3091 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3092 {
f9ae6f95 3093 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
819ab09a 3094 unsigned int i = 0;
3095
3096 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3097 {
3098 mask >>= 1;
4a53d91c 3099 factor *= 2;
819ab09a 3100 i++;
3101 }
3102 }
3103
4a53d91c 3104 /* If the addend is not a multiple of the factor we found, give up. In
3105 theory we could find a smaller common factor but it's useless for our
3106 needs. This situation arises when dealing with a field F1 with no
3107 alignment requirement but that is following a field F2 with such
3108 requirements. As long as we have F2's offset, we don't need alignment
3109 information to compute F1's. */
3110 if (addend % factor != 0)
3111 factor = 1;
3112
3113 return factor * value;
819ab09a 3114}
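
A host-integer rendition of the factor extraction above may make the intent clearer; the tree walk is replaced by an explicit mask and addend, both hypothetical inputs standing in for the BIT_AND_EXPR and PLUS_EXPR operands:

/* Sketch: the power-of-2 factor of (expr & MASK) + ADDEND is 2 raised to the
   number of trailing zero bits of MASK, provided ADDEND is itself a multiple
   of that factor.  */
static unsigned int
host_scale_by_factor_of (unsigned long long mask, unsigned long long addend,
                         unsigned int value)
{
  unsigned long long factor = 1;
  unsigned int i = 0;

  while ((mask & 1) == 0 && i < 64)
    {
      mask >>= 1;
      factor *= 2;
      i++;
    }

  if (addend % factor != 0)
    factor = 1;

  return (unsigned int) (factor * value);
}

/* E.g. for an offset of the form (x & -8) + 16 and VALUE = 8 (BITS_PER_UNIT):
   host_scale_by_factor_of (-8, 16, 8) == 64, i.e. a known factor of 8 bytes.  */
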
3115
2cb54d0d 3116/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
27becfc8 3117 unless we can prove these 2 fields are laid out in such a way that no gap
3118 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3119 is the distance in bits between the end of PREV_FIELD and the starting
3120 position of CURR_FIELD. It is ignored if null. */
3121
3122static bool
3123potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3124{
3125 /* If this is the first field of the record, there cannot be any gap */
3126 if (!prev_field)
3127 return false;
3128
c6ac288c 3129 /* If the previous field is a union type, then return false: The only
27becfc8 3130 time when such a field is not the last field of the record is when
3131 there are other components at fixed positions after it (meaning there
3132 was a rep clause for every field), in which case we don't want the
3133 alignment constraint to override them. */
3134 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3135 return false;
3136
3137 /* If the distance between the end of prev_field and the beginning of
3138 curr_field is constant, then there is a gap if the value of this
3139 constant is not null. */
e913b5cd 3140 if (offset && tree_fits_uhwi_p (offset))
27becfc8 3141 return !integer_zerop (offset);
3142
3143 /* If the size and position of the previous field are constant,
3144 then check the sum of this size and position. There will be a gap
3145 iff it is not a multiple of the current field alignment. */
e913b5cd 3146 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3147 && tree_fits_uhwi_p (bit_position (prev_field)))
3148 return ((tree_to_uhwi (bit_position (prev_field))
3149 + tree_to_uhwi (DECL_SIZE (prev_field)))
27becfc8 3150 % DECL_ALIGN (curr_field) != 0);
3151
3152 /* If both the position and size of the previous field are multiples
3153 of the current field alignment, there cannot be any gap. */
3154 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3155 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3156 return false;
3157
3158 /* Fall back to returning that there may be a potential gap. */
3159 return true;
3160}
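
For the constant case handled above, the test reduces to modular arithmetic on bit positions. A standalone sketch with made-up host-integer arguments:

#include <stdbool.h>

/* There is a potential gap iff the first bit after PREV (its position plus
   its size) is not aligned for CURR.  */
static bool
host_potential_alignment_gap (unsigned long long prev_bit_position,
                              unsigned long long prev_bit_size,
                              unsigned int curr_align)
{
  return (prev_bit_position + prev_bit_size) % curr_align != 0;
}

/* E.g. a 24-bit field starting at bit 0 followed by a 16-bit aligned field:
   host_potential_alignment_gap (0, 24, 16) is true (8 bits of padding).  */
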
3161
7a0ae4af 3162/* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3163 the decl. */
27becfc8 3164
3165tree
7a0ae4af 3166create_label_decl (tree name, Node_Id gnat_node)
27becfc8 3167{
7e9d30ae 3168 tree label_decl
7a0ae4af 3169 = build_decl (input_location, LABEL_DECL, name, void_type_node);
27becfc8 3170
adc78298 3171 SET_DECL_MODE (label_decl, VOIDmode);
7e9d30ae 3172
3173 /* Add this decl to the current binding level. */
3174 gnat_pushdecl (label_decl, gnat_node);
27becfc8 3175
3176 return label_decl;
3177}
3178\f
7a0ae4af 3179/* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3180 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3181 the list of its parameters (a list of PARM_DECL nodes chained through the
3182 DECL_CHAIN field).
27becfc8 3183
c4e63392 3184 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3185
6a1231a5 3186 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3187 definition to be made visible outside of the current compilation unit.
3188
3189 EXTERN_FLAG is true when processing an external subprogram declaration.
4905002b 3190
3191 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3192
3193 DEBUG_INFO_P is true if we need to write debug information for it.
3194
288405ec 3195 DEFINITION is true if the subprogram is to be considered as a definition.
3196
c4e63392 3197 ATTR_LIST is the list of attributes to be attached to the subprogram.
3198
4905002b 3199 GNAT_NODE is used for the position of the decl. */
27becfc8 3200
3201tree
7a0ae4af 3202create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
6a1231a5 3203 enum inline_status_t inline_status, bool public_flag,
3204 bool extern_flag, bool artificial_p, bool debug_info_p,
288405ec 3205 bool definition, struct attrib *attr_list,
3206 Node_Id gnat_node)
27becfc8 3207{
7a0ae4af 3208 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
2cb54d0d 3209 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
27becfc8 3210
4905002b 3211 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
2cb54d0d 3212 DECL_EXTERNAL (subprog_decl) = extern_flag;
6a1231a5 3213 TREE_PUBLIC (subprog_decl) = public_flag;
3214
3215 if (!debug_info_p)
3216 DECL_IGNORED_P (subprog_decl) = 1;
288405ec 3217 if (definition)
3218 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
00b45d95 3219
3220 switch (inline_status)
3221 {
3222 case is_suppressed:
3223 DECL_UNINLINABLE (subprog_decl) = 1;
3224 break;
3225
3226 case is_disabled:
3227 break;
3228
9fac98bb 3229 case is_required:
3230 if (Back_End_Inlining)
f1d18beb 3231 {
3232 decl_attributes (&subprog_decl,
3233 tree_cons (get_identifier ("always_inline"),
3234 NULL_TREE, NULL_TREE),
3235 ATTR_FLAG_TYPE_IN_PLACE);
3236
3237 /* Inline_Always guarantees that every direct call is inlined and
3238 that there is no indirect reference to the subprogram, so the
3239 instance in the original package (as well as its clones in the
3240 client packages created for inter-unit inlining) can be made
3241 private, which causes the out-of-line body to be eliminated. */
3242 TREE_PUBLIC (subprog_decl) = 0;
3243 }
6fa4bf38 3244
b7066486 3245 /* ... fall through ... */
9fac98bb 3246
00b45d95 3247 case is_enabled:
3248 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
4905002b 3249 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
00b45d95 3250 break;
3251
3252 default:
3253 gcc_unreachable ();
3254 }
2cb54d0d 3255
7ebb8c82 3256 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3257
2ff1f477 3258 /* Once everything is processed, finish the subprogram declaration. */
3259 finish_subprog_decl (subprog_decl, asm_name, type);
3260
7ebb8c82 3261 /* Add this decl to the current binding level. */
3262 gnat_pushdecl (subprog_decl, gnat_node);
3263
27becfc8 3264 /* Output the assembler code and/or RTL for the declaration. */
3265 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3266
3267 return subprog_decl;
3268}
6a1231a5 3269
2ff1f477 3270/* Given a subprogram declaration DECL, its assembler name and its type,
3271 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
6a1231a5 3272
3273void
2ff1f477 3274finish_subprog_decl (tree decl, tree asm_name, tree type)
6a1231a5 3275{
3276 tree result_decl
3277 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3278 TREE_TYPE (type));
3279
3280 DECL_ARTIFICIAL (result_decl) = 1;
3281 DECL_IGNORED_P (result_decl) = 1;
3282 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3283 DECL_RESULT (decl) = result_decl;
3284
3285 TREE_READONLY (decl) = TYPE_READONLY (type);
3286 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
2ff1f477 3287
3288 if (asm_name)
3289 {
3290 /* Let the target mangle the name if this isn't a verbatim asm. */
3291 if (*IDENTIFIER_POINTER (asm_name) != '*')
3292 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3293
3294 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3295
3296 /* The expand_main_function circuitry expects "main_identifier_node" to
3297 designate the DECL_NAME of the 'main' entry point, in turn expected
3298 to be declared as the "main" function literally by default. Ada
3299 program entry points are typically declared with a different name
3300 within the binder generated file, exported as 'main' to satisfy the
3301 system expectations. Force main_identifier_node in this case. */
3302 if (asm_name == main_identifier_node)
3303 DECL_NAME (decl) = main_identifier_node;
3304 }
6a1231a5 3305}
27becfc8 3306\f
3307/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3308 body. This routine needs to be invoked before processing the declarations
3309 appearing in the subprogram. */
3310
3311void
3312begin_subprog_body (tree subprog_decl)
3313{
3314 tree param_decl;
3315
27becfc8 3316 announce_function (subprog_decl);
3317
aaaf92e8 3318 /* This function is being defined. */
3319 TREE_STATIC (subprog_decl) = 1;
3320
7a4047d6 3321 /* The failure of this assertion will likely come from a wrong context for
3322 the subprogram body, e.g. another procedure for a procedure declared at
3323 library level. */
3324 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3325
ac45dde2 3326 current_function_decl = subprog_decl;
3327
27becfc8 3328 /* Enter a new binding level and show that all the parameters belong to
3329 this function. */
3330 gnat_pushlevel ();
db72a63f 3331
27becfc8 3332 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1767a056 3333 param_decl = DECL_CHAIN (param_decl))
27becfc8 3334 DECL_CONTEXT (param_decl) = subprog_decl;
3335
3336 make_decl_rtl (subprog_decl);
27becfc8 3337}
3338
c97bd6af 3339/* Finish translating the current subprogram and set its BODY. */
27becfc8 3340
3341void
bfec3452 3342end_subprog_body (tree body)
27becfc8 3343{
3344 tree fndecl = current_function_decl;
3345
a616b634 3346 /* Attach the BLOCK for this level to the function and pop the level. */
27becfc8 3347 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3348 DECL_INITIAL (fndecl) = current_binding_level->block;
3349 gnat_poplevel ();
3350
27becfc8 3351 /* Mark the RESULT_DECL as being in this subprogram. */
3352 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3353
04dd9724 3354 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3355 if (TREE_CODE (body) == BIND_EXPR)
3356 {
3357 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3358 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3359 }
3360
27becfc8 3361 DECL_SAVED_TREE (fndecl) = body;
3362
eac916f9 3363 current_function_decl = decl_function_context (fndecl);
c97bd6af 3364}
3365
3366/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
27becfc8 3367
c97bd6af 3368void
3369rest_of_subprog_body_compilation (tree subprog_decl)
3370{
27becfc8 3371 /* We cannot track the location of errors past this point. */
3372 error_gnat_node = Empty;
3373
3374 /* If we're only annotating types, don't actually compile this function. */
3375 if (type_annotate_only)
3376 return;
3377
bfec3452 3378 /* Dump functions before gimplification. */
c97bd6af 3379 dump_function (TDI_original, subprog_decl);
bfec3452 3380
eac916f9 3381 if (!decl_function_context (subprog_decl))
35ee1c66 3382 cgraph_node::finalize_function (subprog_decl, false);
27becfc8 3383 else
3384 /* Register this function with cgraph just far enough to get it
3385 added to our parent's nested function list. */
727d4825 3386 (void) cgraph_node::get_create (subprog_decl);
27becfc8 3387}
3388
27becfc8 3389tree
3390gnat_builtin_function (tree decl)
3391{
3392 gnat_pushdecl (decl, Empty);
3393 return decl;
3394}
3395
3396/* Return an integer type with the number of bits of precision given by
3397 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3398 it is a signed type. */
3399
3400tree
3401gnat_type_for_size (unsigned precision, int unsignedp)
3402{
3403 tree t;
3404 char type_name[20];
3405
3406 if (precision <= 2 * MAX_BITS_PER_WORD
3407 && signed_and_unsigned_types[precision][unsignedp])
3408 return signed_and_unsigned_types[precision][unsignedp];
3409
3410 if (unsignedp)
3411 t = make_unsigned_type (precision);
3412 else
3413 t = make_signed_type (precision);
16b68428 3414 TYPE_ARTIFICIAL (t) = 1;
27becfc8 3415
3416 if (precision <= 2 * MAX_BITS_PER_WORD)
3417 signed_and_unsigned_types[precision][unsignedp] = t;
3418
3419 if (!TYPE_NAME (t))
3420 {
84f43acc 3421 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
27becfc8 3422 TYPE_NAME (t) = get_identifier (type_name);
3423 }
3424
3425 return t;
3426}
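
The function above is essentially a memoization wrapper keyed on precision and signedness. A minimal standalone analogue of that caching pattern, assuming a hypothetical make callback and a 64-bit cap instead of 2 * MAX_BITS_PER_WORD:

#define CACHE_MAX_PRECISION 64

static void *type_cache[CACHE_MAX_PRECISION + 1][2];

/* Return the cached object for PRECISION/UNSIGNEDP, creating it with MAKE on
   the first request and caching it when the precision is small enough.  */
static void *
cached_type_for_size (unsigned int precision, int unsignedp,
                      void *(*make) (unsigned int, int))
{
  void **slot = 0;
  void *t;

  if (precision <= CACHE_MAX_PRECISION)
    {
      slot = &type_cache[precision][unsignedp != 0];
      if (*slot)
        return *slot;
    }

  t = make (precision, unsignedp);
  if (slot)
    *slot = t;
  return t;
}
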
3427
3428/* Likewise for floating-point types. */
3429
3430static tree
3754d046 3431float_type_for_precision (int precision, machine_mode mode)
27becfc8 3432{
3433 tree t;
3434 char type_name[20];
3435
3436 if (float_types[(int) mode])
3437 return float_types[(int) mode];
3438
3439 float_types[(int) mode] = t = make_node (REAL_TYPE);
3440 TYPE_PRECISION (t) = precision;
3441 layout_type (t);
3442
3443 gcc_assert (TYPE_MODE (t) == mode);
3444 if (!TYPE_NAME (t))
3445 {
3446 sprintf (type_name, "FLOAT_%d", precision);
3447 TYPE_NAME (t) = get_identifier (type_name);
3448 }
3449
3450 return t;
3451}
3452
3453/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3454 an unsigned type; otherwise a signed type is returned. */
3455
3456tree
3754d046 3457gnat_type_for_mode (machine_mode mode, int unsignedp)
27becfc8 3458{
3459 if (mode == BLKmode)
3460 return NULL_TREE;
21100898 3461
3462 if (mode == VOIDmode)
27becfc8 3463 return void_type_node;
21100898 3464
3465 if (COMPLEX_MODE_P (mode))
27becfc8 3466 return NULL_TREE;
21100898 3467
47fbdc12 3468 scalar_float_mode float_mode;
3469 if (is_a <scalar_float_mode> (mode, &float_mode))
3470 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3471 float_mode);
21100898 3472
8974b7a3 3473 scalar_int_mode int_mode;
3474 if (is_a <scalar_int_mode> (mode, &int_mode))
3475 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
21100898 3476
3477 if (VECTOR_MODE_P (mode))
3478 {
3754d046 3479 machine_mode inner_mode = GET_MODE_INNER (mode);
21100898 3480 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3481 if (inner_type)
3482 return build_vector_type_for_mode (inner_type, mode);
3483 }
3484
3485 return NULL_TREE;
27becfc8 3486}
3487
0353d27b 3488/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3489 signedness being specified by UNSIGNEDP. */
27becfc8 3490
3491tree
0353d27b 3492gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
27becfc8 3493{
96536a9d 3494 if (type_node == char_type_node)
3495 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3496
0353d27b 3497 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
27becfc8 3498
3499 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3500 {
baaf92dc 3501 type = copy_type (type);
27becfc8 3502 TREE_TYPE (type) = type_node;
3503 }
3504 else if (TREE_TYPE (type_node)
3505 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3506 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3507 {
baaf92dc 3508 type = copy_type (type);
27becfc8 3509 TREE_TYPE (type) = TREE_TYPE (type_node);
3510 }
3511
3512 return type;
3513}
3514
3515/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3516 transparently converted to each other. */
3517
3518int
3519gnat_types_compatible_p (tree t1, tree t2)
3520{
3521 enum tree_code code;
3522
3523 /* This is the default criterion. */
3524 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3525 return 1;
3526
3527 /* We only check structural equivalence here. */
3528 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3529 return 0;
3530
52dd2567 3531 /* Vector types are also compatible if they have the same number of subparts
3532 and the same form of (scalar) element type. */
3533 if (code == VECTOR_TYPE
3534 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3535 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3536 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3537 return 1;
3538
52a22bc6 3539 /* Array types are also compatible if they are constrained and have the same
292237f3 3540 domain(s), the same component type and the same scalar storage order. */
27becfc8 3541 if (code == ARRAY_TYPE
686353fe 3542 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3543 || (TYPE_DOMAIN (t1)
153edb51 3544 && TYPE_DOMAIN (t2)
686353fe 3545 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3546 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3547 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
52a22bc6 3548 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
541afe39 3549 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3550 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
292237f3 3551 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3552 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
27becfc8 3553 return 1;
3554
27becfc8 3555 return 0;
3556}
47c154d9 3557
c97bd6af 3558/* Return true if EXPR is a useless type conversion. */
3559
3560bool
3561gnat_useless_type_conversion (tree expr)
3562{
3563 if (CONVERT_EXPR_P (expr)
3564 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3565 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3566 return gnat_types_compatible_p (TREE_TYPE (expr),
3567 TREE_TYPE (TREE_OPERAND (expr, 0)));
3568
3569 return false;
3570}
3571
47c154d9 3572/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3573
3574bool
3575fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3576 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3577{
3578 return TYPE_CI_CO_LIST (t) == cico_list
3579 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3580 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3581 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3582}
27becfc8 3583\f
3584/* EXP is an expression for the size of an object. If this size contains
3585 discriminant references, replace them with the maximum (if MAX_P) or
3586 minimum (if !MAX_P) possible value of the discriminant. */
3587
3588tree
3589max_size (tree exp, bool max_p)
3590{
3591 enum tree_code code = TREE_CODE (exp);
3592 tree type = TREE_TYPE (exp);
17b7eb98 3593 tree op0, op1, op2;
27becfc8 3594
3595 switch (TREE_CODE_CLASS (code))
3596 {
3597 case tcc_declaration:
3598 case tcc_constant:
3599 return exp;
3600
3601 case tcc_vl_exp:
3602 if (code == CALL_EXPR)
3603 {
4189e677 3604 tree t, *argarray;
3605 int n, i;
3606
3607 t = maybe_inline_call_in_expr (exp);
3608 if (t)
3609 return max_size (t, max_p);
27becfc8 3610
4189e677 3611 n = call_expr_nargs (exp);
3612 gcc_assert (n > 0);
bef91bcb 3613 argarray = XALLOCAVEC (tree, n);
27becfc8 3614 for (i = 0; i < n; i++)
3615 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3616 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3617 }
3618 break;
3619
3620 case tcc_reference:
3621 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3622 modify. Otherwise, we treat it like a variable. */
6fa4bf38 3623 if (CONTAINS_PLACEHOLDER_P (exp))
3624 {
3625 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3626 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
6e92a788 3627 return
3628 convert (type,
3629 max_size (convert (get_base_type (val_type), val), true));
6fa4bf38 3630 }
27becfc8 3631
6fa4bf38 3632 return exp;
27becfc8 3633
3634 case tcc_comparison:
17b7eb98 3635 return build_int_cst (type, max_p ? 1 : 0);
27becfc8 3636
3637 case tcc_unary:
97658fc9 3638 if (code == NON_LVALUE_EXPR)
3639 return max_size (TREE_OPERAND (exp, 0), max_p);
4df502f9 3640
17b7eb98 3641 op0 = max_size (TREE_OPERAND (exp, 0),
3642 code == NEGATE_EXPR ? !max_p : max_p);
3643
3644 if (op0 == TREE_OPERAND (exp, 0))
3645 return exp;
3646
3647 return fold_build1 (code, type, op0);
97658fc9 3648
27becfc8 3649 case tcc_binary:
97658fc9 3650 {
3651 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3652 tree rhs = max_size (TREE_OPERAND (exp, 1),
3653 code == MINUS_EXPR ? !max_p : max_p);
3654
3655 /* Special-case wanting the maximum value of a MIN_EXPR.
3656 In that case, if one side overflows, return the other. */
3657 if (max_p && code == MIN_EXPR)
3658 {
3659 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3660 return lhs;
3661
3662 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3663 return rhs;
3664 }
3665
3666 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3667 overflowing and the RHS a variable. */
3668 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3669 && TREE_CODE (lhs) == INTEGER_CST
3670 && TREE_OVERFLOW (lhs)
4c3f1f54 3671 && TREE_CODE (rhs) != INTEGER_CST)
97658fc9 3672 return lhs;
3673
4c3f1f54 3674 /* If we are going to subtract a "negative" value in an unsigned type,
3675 do the operation as an addition of the negated value, in order to
3676 avoid creating a spurious overflow below. */
3677 if (code == MINUS_EXPR
3678 && TYPE_UNSIGNED (type)
3679 && TREE_CODE (rhs) == INTEGER_CST
3680 && !TREE_OVERFLOW (rhs)
3681 && tree_int_cst_sign_bit (rhs) != 0)
3682 {
3683 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3684 code = PLUS_EXPR;
3685 }
3686
17b7eb98 3687 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
3688 return exp;
3689
4c3f1f54 3690 /* We need to detect overflows so we call size_binop here. */
97658fc9 3691 return size_binop (code, lhs, rhs);
3692 }
3693
27becfc8 3694 case tcc_expression:
3695 switch (TREE_CODE_LENGTH (code))
3696 {
3697 case 1:
7ad4f11d 3698 if (code == SAVE_EXPR)
3699 return exp;
97658fc9 3700
17b7eb98 3701 op0 = max_size (TREE_OPERAND (exp, 0),
3702 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3703
3704 if (op0 == TREE_OPERAND (exp, 0))
3705 return exp;
3706
3707 return fold_build1 (code, type, op0);
27becfc8 3708
3709 case 2:
3710 if (code == COMPOUND_EXPR)
3711 return max_size (TREE_OPERAND (exp, 1), max_p);
3712
17b7eb98 3713 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3714 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3715
3716 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3717 return exp;
3718
3719 return fold_build2 (code, type, op0, op1);
27becfc8 3720
3721 case 3:
7ad4f11d 3722 if (code == COND_EXPR)
17b7eb98 3723 {
3724 op1 = TREE_OPERAND (exp, 1);
3725 op2 = TREE_OPERAND (exp, 2);
3726
3727 if (!op1 || !op2)
3728 return exp;
3729
3730 return
3731 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3732 max_size (op1, max_p), max_size (op2, max_p));
3733 }
3734 break;
97658fc9 3735
3736 default:
3737 break;
27becfc8 3738 }
3739
3740 /* Other tree classes cannot happen. */
3741 default:
3742 break;
3743 }
3744
3745 gcc_unreachable ();
3746}
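
The sign-flipping in the unary and binary cases above is interval arithmetic in disguise: to maximize a difference you maximize the minuend and minimize the subtrahend. A host-integer sketch of just that rule, with hypothetical operand bounds as inputs:

#include <stdbool.h>

/* Upper (or lower, if !MAX_P) bound of LHS - RHS given the bounds of both
   operands, mirroring how max_size flips MAX_P when recursing on the
   right-hand side of a MINUS_EXPR.  */
static long long
bound_of_difference (long long lhs_min, long long lhs_max,
                     long long rhs_min, long long rhs_max, bool max_p)
{
  long long lhs = max_p ? lhs_max : lhs_min;
  long long rhs = max_p ? rhs_min : rhs_max;   /* note the flip */
  return lhs - rhs;
}

/* E.g. for a size of the form 100 - 4 * D with D in 0 .. 10, the maximum is
   reached at D = 0: bound_of_difference (100, 100, 0, 40, true) == 100.  */
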
3747\f
3748/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3749 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3750 Return a constructor for the template. */
3751
3752tree
3753build_template (tree template_type, tree array_type, tree expr)
3754{
f1f41a6c 3755 vec<constructor_elt, va_gc> *template_elts = NULL;
27becfc8 3756 tree bound_list = NULL_TREE;
3757 tree field;
3758
3759 while (TREE_CODE (array_type) == RECORD_TYPE
a98f6bec 3760 && (TYPE_PADDING_P (array_type)
27becfc8 3761 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3762 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3763
3764 if (TREE_CODE (array_type) == ARRAY_TYPE
3765 || (TREE_CODE (array_type) == INTEGER_TYPE
3766 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3767 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3768
3769 /* First make the list for a CONSTRUCTOR for the template. Go down the
3770 field list of the template instead of the type chain because this
3771 array might be an Ada array of arrays and we can't tell where the
3772 nested arrays stop being the underlying object. */
3773
3774 for (field = TYPE_FIELDS (template_type); field;
3775 (bound_list
3776 ? (bound_list = TREE_CHAIN (bound_list))
3777 : (array_type = TREE_TYPE (array_type))),
1767a056 3778 field = DECL_CHAIN (DECL_CHAIN (field)))
27becfc8 3779 {
3780 tree bounds, min, max;
3781
3782 /* If we have a bound list, get the bounds from there. Likewise
3783 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3784 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3785 This will give us a maximum range. */
3786 if (bound_list)
3787 bounds = TREE_VALUE (bound_list);
3788 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3789 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3790 else if (expr && TREE_CODE (expr) == PARM_DECL
3791 && DECL_BY_COMPONENT_PTR_P (expr))
3792 bounds = TREE_TYPE (field);
3793 else
3794 gcc_unreachable ();
3795
3796 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
1767a056 3797 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
27becfc8 3798
3799 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3800 substitute it from OBJECT. */
3801 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3802 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3803
5cd86571 3804 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
1767a056 3805 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
27becfc8 3806 }
3807
5cd86571 3808 return gnat_build_constructor (template_type, template_elts);
27becfc8 3809}
3810\f
fc07fe6f 3811/* Return true if TYPE is suitable for the element type of a vector. */
3812
3813static bool
3814type_for_vector_element_p (tree type)
3815{
3754d046 3816 machine_mode mode;
fc07fe6f 3817
3818 if (!INTEGRAL_TYPE_P (type)
3819 && !SCALAR_FLOAT_TYPE_P (type)
3820 && !FIXED_POINT_TYPE_P (type))
3821 return false;
3822
3823 mode = TYPE_MODE (type);
3824 if (GET_MODE_CLASS (mode) != MODE_INT
3825 && !SCALAR_FLOAT_MODE_P (mode)
3826 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3827 return false;
3828
3829 return true;
3830}
3831
3832/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3833 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3834 attribute declaration and want to issue error messages on failure. */
3835
3836static tree
3837build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3838{
3839 unsigned HOST_WIDE_INT size_int, inner_size_int;
3840 int nunits;
3841
3842 /* Silently punt on variable sizes. We can't make vector types for them,
3843 need to ignore them on front-end generated subtypes of unconstrained
3844 base types, and this attribute is for binding implementors, not end
3845 users, so we should never get there from legitimate explicit uses. */
3846 if (!tree_fits_uhwi_p (size))
3847 return NULL_TREE;
3848 size_int = tree_to_uhwi (size);
3849
3850 if (!type_for_vector_element_p (inner_type))
3851 {
3852 if (attribute)
3853 error ("invalid element type for attribute %qs",
3854 IDENTIFIER_POINTER (attribute));
3855 return NULL_TREE;
3856 }
3857 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3858
3859 if (size_int % inner_size_int)
3860 {
3861 if (attribute)
3862 error ("vector size not an integral multiple of component size");
3863 return NULL_TREE;
3864 }
3865
3866 if (size_int == 0)
3867 {
3868 if (attribute)
3869 error ("zero vector size");
3870 return NULL_TREE;
3871 }
3872
3873 nunits = size_int / inner_size_int;
3874 if (nunits & (nunits - 1))
3875 {
3876 if (attribute)
3877 error ("number of components of vector not a power of two");
3878 return NULL_TREE;
3879 }
3880
3881 return build_vector_type (inner_type, nunits);
3882}
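
The three error checks above reduce to simple arithmetic on the byte sizes. A standalone sketch of the validity test, with host integers standing in for the tree constants:

#include <stdbool.h>

/* SIZE must be a nonzero multiple of INNER_SIZE and the resulting number of
   components must be a power of two.  */
static bool
vector_size_valid_p (unsigned long long size, unsigned long long inner_size)
{
  unsigned long long nunits;

  if (inner_size == 0 || size == 0 || size % inner_size != 0)
    return false;

  nunits = size / inner_size;
  return (nunits & (nunits - 1)) == 0;
}

/* E.g. vector_size_valid_p (16, 4) is true (4 components), whereas
   vector_size_valid_p (24, 4) is false (6 is not a power of two).  */
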
3883
3884/* Return a vector type whose representative array type is ARRAY_TYPE, or
3885 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3886 processing the attribute and want to issue error messages on failure. */
3887
3888static tree
3889build_vector_type_for_array (tree array_type, tree attribute)
3890{
3891 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3892 TYPE_SIZE_UNIT (array_type),
3893 attribute);
3894 if (!vector_type)
3895 return NULL_TREE;
3896
3897 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3898 return vector_type;
3899}
3900\f
8c77dd48 3901/* Build a type to be used to represent an aliased object whose nominal type
3902 is an unconstrained array. This consists of a RECORD_TYPE containing a
3903 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3904 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3905 an arbitrary unconstrained object. Use NAME as the name of the record.
3906 DEBUG_INFO_P is true if we need to write debug information for the type. */
27becfc8 3907
3908tree
8c77dd48 3909build_unc_object_type (tree template_type, tree object_type, tree name,
3910 bool debug_info_p)
27becfc8 3911{
ba502e2b 3912 tree decl;
27becfc8 3913 tree type = make_node (RECORD_TYPE);
d51eba1a 3914 tree template_field
3915 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3916 NULL_TREE, NULL_TREE, 0, 1);
3917 tree array_field
3918 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3919 NULL_TREE, NULL_TREE, 0, 1);
27becfc8 3920
3921 TYPE_NAME (type) = name;
3922 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
1767a056 3923 DECL_CHAIN (template_field) = array_field;
8c77dd48 3924 finish_record_type (type, template_field, 0, true);
3925
3926 /* Declare it now since it will never be declared otherwise. This is
3927 necessary to ensure that its subtrees are properly marked. */
ba502e2b 3928 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3929
3930 /* template_type will not be used elsewhere than here, so to keep the debug
3931 info clean and in order to avoid scoping issues, make decl its
3932 context. */
3933 gnat_set_type_context (template_type, decl);
27becfc8 3934
3935 return type;
3936}
3937
3938/* Same, taking a thin or fat pointer type instead of a template type. */
3939
3940tree
3941build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
8c77dd48 3942 tree name, bool debug_info_p)
27becfc8 3943{
3944 tree template_type;
3945
a98f6bec 3946 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
27becfc8 3947
3948 template_type
a98f6bec 3949 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
1767a056 3950 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
27becfc8 3951 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
8c77dd48 3952
3953 return
3954 build_unc_object_type (template_type, object_type, name, debug_info_p);
27becfc8 3955}
27becfc8 3956\f
27dd98d5 3957/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3958 In the normal case this is just two adjustments, but we have more to
3959 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
27becfc8 3960
3961void
3962update_pointer_to (tree old_type, tree new_type)
3963{
3964 tree ptr = TYPE_POINTER_TO (old_type);
3965 tree ref = TYPE_REFERENCE_TO (old_type);
cfbbebf3 3966 tree t;
27becfc8 3967
3968 /* If this is the main variant, process all the other variants first. */
3969 if (TYPE_MAIN_VARIANT (old_type) == old_type)
cfbbebf3 3970 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3971 update_pointer_to (t, new_type);
27becfc8 3972
27dd98d5 3973 /* If no pointers and no references, we are done. */
27becfc8 3974 if (!ptr && !ref)
3975 return;
3976
3977 /* Merge the old type qualifiers in the new type.
3978
3979 Each old variant has qualifiers for specific reasons, and the new
27dd98d5 3980 designated type as well. Each set of qualifiers represents useful
27becfc8 3981 information grabbed at some point, and merging the two simply unifies
3982 these inputs into the final type description.
3983
3984 Consider for instance a volatile type frozen after an access to constant
27dd98d5 3985 type designating it; after the designated type's freeze, we get here with
3986 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3987 when the access type was processed. We will make a volatile and readonly
27becfc8 3988 designated type, because that's what it really is.
3989
27dd98d5 3990 We might also get here for a non-dummy OLD_TYPE variant with different
3991 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
27becfc8 3992 to private record type elaboration (see the comments around the call to
27dd98d5 3993 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3994 the qualifiers in those cases too, to avoid accidentally discarding the
3995 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3996 new_type
3997 = build_qualified_type (new_type,
3998 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3999
4000 /* If old type and new type are identical, there is nothing to do. */
27becfc8 4001 if (old_type == new_type)
4002 return;
4003
4004 /* Otherwise, first handle the simple case. */
4005 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4006 {
cfbbebf3 4007 tree new_ptr, new_ref;
4008
4009 /* If pointer or reference already points to new type, nothing to do.
4010 This can happen as update_pointer_to can be invoked multiple times
4011 on the same couple of types because of the type variants. */
4012 if ((ptr && TREE_TYPE (ptr) == new_type)
4013 || (ref && TREE_TYPE (ref) == new_type))
4014 return;
4015
4016 /* Chain PTR and its variants at the end. */
4017 new_ptr = TYPE_POINTER_TO (new_type);
4018 if (new_ptr)
4019 {
4020 while (TYPE_NEXT_PTR_TO (new_ptr))
4021 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4022 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4023 }
4024 else
4025 TYPE_POINTER_TO (new_type) = ptr;
27becfc8 4026
cfbbebf3 4027 /* Now adjust them. */
27becfc8 4028 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
cfbbebf3 4029 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
26bf1588 4030 {
4031 TREE_TYPE (t) = new_type;
4032 if (TYPE_NULL_BOUNDS (t))
4033 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4034 }
2c12d03b 4035
cfbbebf3 4036 /* Chain REF and its variants at the end. */
4037 new_ref = TYPE_REFERENCE_TO (new_type);
4038 if (new_ref)
4039 {
4040 while (TYPE_NEXT_REF_TO (new_ref))
4041 new_ref = TYPE_NEXT_REF_TO (new_ref);
4042 TYPE_NEXT_REF_TO (new_ref) = ref;
4043 }
4044 else
4045 TYPE_REFERENCE_TO (new_type) = ref;
4046
4047 /* Now adjust them. */
27becfc8 4048 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
cfbbebf3 4049 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4050 TREE_TYPE (t) = new_type;
2c12d03b 4051
4052 TYPE_POINTER_TO (old_type) = NULL_TREE;
11583c93 4053 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
27becfc8 4054 }
4055
cfbbebf3 4056 /* Now deal with the unconstrained array case. In this case the pointer
4057 is actually a record where both fields are pointers to dummy nodes.
41dd28aa 4058 Turn them into pointers to the correct types using update_pointer_to.
4059 Likewise for the pointer to the object record (thin pointer). */
27becfc8 4060 else
4061 {
41dd28aa 4062 tree new_ptr = TYPE_POINTER_TO (new_type);
cfbbebf3 4063
4064 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4065
41dd28aa 4066 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
cfbbebf3 4067 since update_pointer_to can be invoked multiple times on the same
4068 couple of types because of the type variants. */
4069 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4070 return;
4071
27becfc8 4072 update_pointer_to
41dd28aa 4073 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4074 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
27becfc8 4075
27becfc8 4076 update_pointer_to
41dd28aa 4077 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4078 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
cfbbebf3 4079
41dd28aa 4080 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4081 TYPE_OBJECT_RECORD_TYPE (new_type));
27becfc8 4082
41dd28aa 4083 TYPE_POINTER_TO (old_type) = NULL_TREE;
6a1231a5 4084 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
27becfc8 4085 }
4086}
4087\f
d47e2843 4088/* Convert EXPR, a pointer to a constrained array, into a pointer to an
4089 unconstrained one. This involves making or finding a template. */
27becfc8 4090
4091static tree
4092convert_to_fat_pointer (tree type, tree expr)
4093{
1767a056 4094 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
d47e2843 4095 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
27becfc8 4096 tree etype = TREE_TYPE (expr);
345e38c3 4097 tree template_addr;
f1f41a6c 4098 vec<constructor_elt, va_gc> *v;
4099 vec_alloc (v, 2);
27becfc8 4100
26bf1588 4101 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4102 array (compare_fat_pointers ensures that this is the full discriminant)
4103 and a valid pointer to the bounds. This latter property is necessary
4104 since the compiler can hoist the load of the bounds done through it. */
27becfc8 4105 if (integer_zerop (expr))
5cd86571 4106 {
26bf1588 4107 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4108 tree null_bounds, t;
4109
4110 if (TYPE_NULL_BOUNDS (ptr_template_type))
4111 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4112 else
4113 {
4114 /* The template type can still be dummy at this point so we build an
4115 empty constructor. The middle-end will fill it in with zeros. */
4d585594 4116 t = build_constructor (template_type, NULL);
26bf1588 4117 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4118 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4119 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4120 }
4121
5cd86571 4122 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
26bf1588 4123 fold_convert (p_array_type, null_pointer_node));
4124 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4125 t = build_constructor (type, v);
4126 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4127 TREE_CONSTANT (t) = 0;
4128 TREE_STATIC (t) = 1;
4129
4130 return t;
5cd86571 4131 }
27becfc8 4132
7b4b0e11 4133 /* If EXPR is a thin pointer, make template and data from the record. */
4134 if (TYPE_IS_THIN_POINTER_P (etype))
27becfc8 4135 {
7b4b0e11 4136 tree field = TYPE_FIELDS (TREE_TYPE (etype));
27becfc8 4137
df8a2682 4138 expr = gnat_protect_expr (expr);
345e38c3 4139
4140 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4141 the thin pointer value has been shifted so we shift it back to get
4142 the template address. */
4143 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
dd0cd1e4 4144 {
345e38c3 4145 template_addr
4146 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4147 fold_build1 (NEGATE_EXPR, sizetype,
4148 byte_position
4149 (DECL_CHAIN (field))));
4150 template_addr
4151 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4152 template_addr);
dd0cd1e4 4153 }
27becfc8 4154
345e38c3 4155 /* Otherwise we explicitly take the address of the fields. */
4156 else
4157 {
4158 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4159 template_addr
4160 = build_unary_op (ADDR_EXPR, NULL_TREE,
c860752a 4161 build_component_ref (expr, field, false));
345e38c3 4162 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
c860752a 4163 build_component_ref (expr, DECL_CHAIN (field),
345e38c3 4164 false));
4165 }
27becfc8 4166 }
d47e2843 4167
4168 /* Otherwise, build the constructor for the template. */
27becfc8 4169 else
345e38c3 4170 template_addr
4171 = build_unary_op (ADDR_EXPR, NULL_TREE,
4172 build_template (template_type, TREE_TYPE (etype),
4173 expr));
27becfc8 4174
d47e2843 4175 /* The final result is a constructor for the fat pointer.
27becfc8 4176
d47e2843 4177 If EXPR is an argument of a foreign convention subprogram, the type it
4178 points to is directly the component type. In this case, the expression
27becfc8 4179 type may not match the corresponding FIELD_DECL type at this point, so we
d47e2843 4180 call "convert" here to fix that up if necessary. This type consistency is
27becfc8 4181 required, for instance because it ensures that possible later folding of
d47e2843 4182 COMPONENT_REFs against this constructor always yields something of the
27becfc8 4183 same type as the initial reference.
4184
d47e2843 4185 Note that the call to "build_template" above is still fine because it
4186 will only refer to the provided TEMPLATE_TYPE in this case. */
345e38c3 4187 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4188 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
5cd86571 4189 return gnat_build_constructor (type, v);
27becfc8 4190}
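
As a mental model for the constructor built above, a fat pointer is a two-field record: a pointer to the array data followed by a pointer to the bounds template. A rough C picture for a one-dimensional array of integers (layout shown for illustration only, not the exact GENERIC types):

/* Hypothetical flattening of the fat pointer record for a 1-D array.  */
struct fat_bounds
{
  int lb;   /* lower bound, first field of the template */
  int ub;   /* upper bound, second field of the template */
};

struct fat_pointer
{
  int *p_array;                 /* TYPE_FIELDS (type): the data pointer */
  struct fat_bounds *p_bounds;  /* DECL_CHAIN of it: the bounds pointer */
};
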
4191\f
27becfc8 4192/* Create an expression whose value is that of EXPR,
4193 converted to type TYPE. The TREE_TYPE of the value
4194 is always TYPE. This function implements all reasonable
4195 conversions; callers should filter out those that are
4196 not permitted by the language being compiled. */
4197
4198tree
4199convert (tree type, tree expr)
4200{
27becfc8 4201 tree etype = TREE_TYPE (expr);
4202 enum tree_code ecode = TREE_CODE (etype);
2b161576 4203 enum tree_code code = TREE_CODE (type);
27becfc8 4204
2b161576 4205 /* If the expression is already of the right type, we are done. */
4206 if (etype == type)
27becfc8 4207 return expr;
4208
4209 /* If both input and output have padding and are of variable size, do this
4210 as an unchecked conversion. Likewise if one is a mere variant of the
4211 other, so we avoid a pointless unpad/repad sequence. */
4212 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
a98f6bec 4213 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
27becfc8 4214 && (!TREE_CONSTANT (TYPE_SIZE (type))
4215 || !TREE_CONSTANT (TYPE_SIZE (etype))
76cb9822 4216 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
27becfc8 4217 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4218 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4219 ;
4220
9fb1518c 4221 /* If the output type has padding, convert to the inner type and make a
4222 constructor to build the record, unless a variable size is involved. */
a98f6bec 4223 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
27becfc8 4224 {
f1f41a6c 4225 vec<constructor_elt, va_gc> *v;
5cd86571 4226
27becfc8 4227 /* If we previously converted from another type and our type is
4228 of variable size, remove the conversion to avoid the need for
9fb1518c 4229 variable-sized temporaries. Likewise for a conversion between
27becfc8 4230 original and packable version. */
4231 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4232 && (!TREE_CONSTANT (TYPE_SIZE (type))
4233 || (ecode == RECORD_TYPE
4234 && TYPE_NAME (etype)
4235 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4236 expr = TREE_OPERAND (expr, 0);
4237
4238 /* If we are just removing the padding from expr, convert the original
4239 object if we have variable size in order to avoid the need for some
9fb1518c 4240 variable-sized temporaries. Likewise if the padding is a variant
27becfc8 4241 of the other, so we avoid a pointless unpad/repad sequence. */
4242 if (TREE_CODE (expr) == COMPONENT_REF
27becfc8 4243 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4244 && (!TREE_CONSTANT (TYPE_SIZE (type))
76cb9822 4245 || TYPE_MAIN_VARIANT (type)
4246 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
27becfc8 4247 || (ecode == RECORD_TYPE
4248 && TYPE_NAME (etype)
4249 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4250 return convert (type, TREE_OPERAND (expr, 0));
4251
50eca4c8 4252 /* If the inner type is of self-referential size and the expression type
4253 is a record, do this as an unchecked conversion. But first pad the
4254 expression if possible to have the same size on both sides. */
2b161576 4255 if (ecode == RECORD_TYPE
9fb1518c 4256 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
50eca4c8 4257 {
2ca11b4d 4258 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
50eca4c8 4259 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
2ca11b4d 4260 false, false, false, true),
4261 expr);
50eca4c8 4262 return unchecked_convert (type, expr, false);
4263 }
27becfc8 4264
9fb1518c 4265 /* If we are converting between array types with variable size, do the
4266 final conversion as an unchecked conversion, again to avoid the need
4267 for some variable-sized temporaries. If valid, this conversion is
4268 very likely purely technical and without real effects. */
2b161576 4269 if (ecode == ARRAY_TYPE
9fb1518c 4270 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4271 && !TREE_CONSTANT (TYPE_SIZE (etype))
4272 && !TREE_CONSTANT (TYPE_SIZE (type)))
4273 return unchecked_convert (type,
4274 convert (TREE_TYPE (TYPE_FIELDS (type)),
4275 expr),
4276 false);
4277
f1f41a6c 4278 vec_alloc (v, 1);
5cd86571 4279 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4280 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4281 return gnat_build_constructor (type, v);
27becfc8 4282 }
4283
4284 /* If the input type has padding, remove it and convert to the output type.
4285 The ordering of the conditions is arranged to ensure that the output type
4286 is not a padding type here, as it is not clear whether the conversion would
4287 always be correct if this were to happen. */
a98f6bec 4288 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
27becfc8 4289 {
4290 tree unpadded;
4291
4292 /* If we have just converted to this padded type, just get the
4293 inner expression. */
cdbc31ab 4294 if (TREE_CODE (expr) == CONSTRUCTOR)
4295 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
27becfc8 4296
4297 /* Otherwise, build an explicit component reference. */
4298 else
c860752a 4299 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
27becfc8 4300
4301 return convert (type, unpadded);
4302 }
4303
8fc51369 4304 /* If the input is a biased type, convert first to the base type and add
4305 the bias. Note that the bias must go through a full conversion to the
4306 base type, lest it is itself a biased value; this happens for subtypes
4307 of biased types. */
27becfc8 4308 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4309 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
6fa4bf38 4310 fold_convert (TREE_TYPE (etype), expr),
8fc51369 4311 convert (TREE_TYPE (etype),
4312 TYPE_MIN_VALUE (etype))));
27becfc8 4313
4314 /* If the input is a justified modular type, we need to extract the actual
4315 object before converting it to any other type with the exceptions of an
4316 unconstrained array or of a mere type variant. It is useful to avoid the
4317 extraction and conversion in the type variant case because it could end
4318 up replacing a VAR_DECL expr by a constructor and we might be about to
4319 take the address of the result. */
4320 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4321 && code != UNCONSTRAINED_ARRAY_TYPE
4322 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
c860752a 4323 return
4324 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
27becfc8 4325
4326 /* If converting to a type that contains a template, convert to the data
4327 type and then build the template. */
4328 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4329 {
1767a056 4330 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
f1f41a6c 4331 vec<constructor_elt, va_gc> *v;
4332 vec_alloc (v, 2);
27becfc8 4333
4334 /* If the source already has a template, get a reference to the
4335 associated array only, as we are going to rebuild a template
4336 for the target type anyway. */
4337 expr = maybe_unconstrained_array (expr);
4338
5cd86571 4339 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4340 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4341 obj_type, NULL_TREE));
c70df25e 4342 if (expr)
4343 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4344 convert (obj_type, expr));
5cd86571 4345 return gnat_build_constructor (type, v);
27becfc8 4346 }
4347
0b1f7790 4348 /* There are some cases of expressions that we process specially. */
27becfc8 4349 switch (TREE_CODE (expr))
4350 {
4351 case ERROR_MARK:
4352 return expr;
4353
4354 case NULL_EXPR:
4355 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4356 conversion in gnat_expand_expr. NULL_EXPR does not represent
 4357 an actual value, so no conversion is needed. */
4358 expr = copy_node (expr);
4359 TREE_TYPE (expr) = type;
4360 return expr;
4361
4362 case STRING_CST:
4363 /* If we are converting a STRING_CST to another constrained array type,
4364 just make a new one in the proper type. */
4365 if (code == ecode && AGGREGATE_TYPE_P (etype)
4366 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4367 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4368 {
4369 expr = copy_node (expr);
4370 TREE_TYPE (expr) = type;
4371 return expr;
4372 }
4373 break;
4374
52dd2567 4375 case VECTOR_CST:
62092ba6 4376 /* If we are converting a VECTOR_CST to a mere type variant, just make
52dd2567 4377 a new one in the proper type. */
4378 if (code == ecode && gnat_types_compatible_p (type, etype))
4379 {
4380 expr = copy_node (expr);
4381 TREE_TYPE (expr) = type;
4382 return expr;
4383 }
b7066486 4384 break;
c3d5d916 4385
27becfc8 4386 case CONSTRUCTOR:
62092ba6 4387 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4388 another padding type around the same type, just make a new one in
4389 the proper type. */
4390 if (code == ecode
4391 && (gnat_types_compatible_p (type, etype)
4392 || (code == RECORD_TYPE
4393 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4394 && TREE_TYPE (TYPE_FIELDS (type))
4395 == TREE_TYPE (TYPE_FIELDS (etype)))))
27becfc8 4396 {
4397 expr = copy_node (expr);
4398 TREE_TYPE (expr) = type;
f1f41a6c 4399 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
27becfc8 4400 return expr;
4401 }
4402
e568189f 4403 /* Likewise for a conversion between original and packable version, or
4404 conversion between types of the same size and with the same list of
4405 fields, but we have to work harder to preserve type consistency. */
27becfc8 4406 if (code == ecode
4407 && code == RECORD_TYPE
e568189f 4408 && (TYPE_NAME (type) == TYPE_NAME (etype)
4409 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4410
27becfc8 4411 {
f1f41a6c 4412 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4413 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4414 vec<constructor_elt, va_gc> *v;
4415 vec_alloc (v, len);
27becfc8 4416 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4417 unsigned HOST_WIDE_INT idx;
4418 tree index, value;
4419
d515c104 4420 /* Whether we need to clear TREE_CONSTANT et al. on the output
4421 constructor when we convert in place. */
4422 bool clear_constant = false;
4423
27becfc8 4424 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4425 {
62092ba6 4426 /* Skip the missing fields in the CONSTRUCTOR. */
4427 while (efield && field && !SAME_FIELD_P (efield, index))
4428 {
4429 efield = DECL_CHAIN (efield);
4430 field = DECL_CHAIN (field);
4431 }
e568189f 4432 /* The field must be the same. */
62092ba6 4433 if (!(efield && field && SAME_FIELD_P (efield, field)))
27becfc8 4434 break;
62092ba6 4435 constructor_elt elt
4436 = {field, convert (TREE_TYPE (field), value)};
f1f41a6c 4437 v->quick_push (elt);
d515c104 4438
4439 /* If packing has made this field a bitfield and the input
4440 value couldn't be emitted statically any more, we need to
4441 clear TREE_CONSTANT on our output. */
9bfb1138 4442 if (!clear_constant
4443 && TREE_CONSTANT (expr)
d515c104 4444 && !CONSTRUCTOR_BITFIELD_P (efield)
4445 && CONSTRUCTOR_BITFIELD_P (field)
4446 && !initializer_constant_valid_for_bitfield_p (value))
4447 clear_constant = true;
4448
1767a056 4449 efield = DECL_CHAIN (efield);
4450 field = DECL_CHAIN (field);
27becfc8 4451 }
4452
d515c104 4453 /* If we have been able to match and convert all the input fields
 4454 to their output type, convert in place now. We'll fall back to a
4455 view conversion downstream otherwise. */
27becfc8 4456 if (idx == len)
4457 {
4458 expr = copy_node (expr);
4459 TREE_TYPE (expr) = type;
4460 CONSTRUCTOR_ELTS (expr) = v;
d515c104 4461 if (clear_constant)
9bfb1138 4462 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
27becfc8 4463 return expr;
4464 }
4465 }
52dd2567 4466
4467 /* Likewise for a conversion between array type and vector type with a
4468 compatible representative array. */
4469 else if (code == VECTOR_TYPE
4470 && ecode == ARRAY_TYPE
4471 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4472 etype))
4473 {
f1f41a6c 4474 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4475 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4476 vec<constructor_elt, va_gc> *v;
52dd2567 4477 unsigned HOST_WIDE_INT ix;
4478 tree value;
4479
4480 /* Build a VECTOR_CST from a *constant* array constructor. */
4481 if (TREE_CONSTANT (expr))
4482 {
4483 bool constant_p = true;
4484
4485 /* Iterate through elements and check if all constructor
4486 elements are *_CSTs. */
4487 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4488 if (!CONSTANT_CLASS_P (value))
4489 {
4490 constant_p = false;
4491 break;
4492 }
4493
4494 if (constant_p)
4495 return build_vector_from_ctor (type,
4496 CONSTRUCTOR_ELTS (expr));
4497 }
4498
4499 /* Otherwise, build a regular vector constructor. */
f1f41a6c 4500 vec_alloc (v, len);
52dd2567 4501 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4502 {
e82e4eb5 4503 constructor_elt elt = {NULL_TREE, value};
f1f41a6c 4504 v->quick_push (elt);
52dd2567 4505 }
4506 expr = copy_node (expr);
4507 TREE_TYPE (expr) = type;
4508 CONSTRUCTOR_ELTS (expr) = v;
4509 return expr;
4510 }
27becfc8 4511 break;
4512
4513 case UNCONSTRAINED_ARRAY_REF:
0b1f7790 4514 /* First retrieve the underlying array. */
4515 expr = maybe_unconstrained_array (expr);
4516 etype = TREE_TYPE (expr);
4517 ecode = TREE_CODE (etype);
4518 break;
27becfc8 4519
4520 case VIEW_CONVERT_EXPR:
4521 {
4522 /* GCC 4.x is very sensitive to type consistency overall, and view
4523 conversions thus are very frequent. Even though just "convert"ing
4524 the inner operand to the output type is fine in most cases, it
4525 might expose unexpected input/output type mismatches in special
4526 circumstances so we avoid such recursive calls when we can. */
4527 tree op0 = TREE_OPERAND (expr, 0);
4528
4529 /* If we are converting back to the original type, we can just
4530 lift the input conversion. This is a common occurrence with
4531 switches back-and-forth amongst type variants. */
4532 if (type == TREE_TYPE (op0))
4533 return op0;
4534
52dd2567 4535 /* Otherwise, if we're converting between two aggregate or vector
4536 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4537 target type in place or to just convert the inner expression. */
4538 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4539 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
27becfc8 4540 {
4541 /* If we are converting between mere variants, we can just
4542 substitute the VIEW_CONVERT_EXPR in place. */
4543 if (gnat_types_compatible_p (type, etype))
4544 return build1 (VIEW_CONVERT_EXPR, type, op0);
4545
4546 /* Otherwise, we may just bypass the input view conversion unless
4547 one of the types is a fat pointer, which is handled by
4548 specialized code below which relies on exact type matching. */
a98f6bec 4549 else if (!TYPE_IS_FAT_POINTER_P (type)
4550 && !TYPE_IS_FAT_POINTER_P (etype))
27becfc8 4551 return convert (type, op0);
4552 }
d949134d 4553
4554 break;
27becfc8 4555 }
27becfc8 4556
27becfc8 4557 default:
4558 break;
4559 }
4560
4561 /* Check for converting to a pointer to an unconstrained array. */
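  /* A fat pointer is a record whose first field is a pointer to the array
     data, which is what the POINTER_TYPE case below retrieves, and whose
     second field is a pointer to the bounds.  */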
a98f6bec 4562 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
27becfc8 4563 return convert_to_fat_pointer (type, expr);
4564
52dd2567 4565 /* If we are converting between two aggregate or vector types that are mere
4566 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4567 to a vector type from its representative array type. */
4568 else if ((code == ecode
4569 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4570 && gnat_types_compatible_p (type, etype))
4571 || (code == VECTOR_TYPE
4572 && ecode == ARRAY_TYPE
4573 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4574 etype)))
27becfc8 4575 return build1 (VIEW_CONVERT_EXPR, type, expr);
4576
cfc3dd35 4577 /* If we are converting between tagged types, try to upcast properly. */
4578 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4579 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4580 {
4581 tree child_etype = etype;
4582 do {
4583 tree field = TYPE_FIELDS (child_etype);
4584 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
c860752a 4585 return build_component_ref (expr, field, false);
cfc3dd35 4586 child_etype = TREE_TYPE (field);
4587 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4588 }
4589
a88c8608 4590 /* If we are converting from a smaller form of record type back to it, just
4591 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4592 size on both sides. */
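  /* The smaller form is typically the packable version of the record type,
     which keeps the original TYPE_NAME like a variant does; see
     smaller_form_type_p below.  */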
4593 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4594 && smaller_form_type_p (etype, type))
4595 {
4596 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4597 false, false, false, true),
4598 expr);
4599 return build1 (VIEW_CONVERT_EXPR, type, expr);
4600 }
4601
27becfc8 4602 /* In all other cases of related types, make a NOP_EXPR. */
22582d86 4603 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
27becfc8 4604 return fold_convert (type, expr);
4605
4606 switch (code)
4607 {
4608 case VOID_TYPE:
4609 return fold_build1 (CONVERT_EXPR, type, expr);
4610
27becfc8 4611 case INTEGER_TYPE:
4612 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4613 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4614 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4615 return unchecked_convert (type, expr, false);
8fc51369 4616
4617 /* If the output is a biased type, convert first to the base type and
4618 subtract the bias. Note that the bias itself must go through a full
 4619 conversion to the base type, lest it be a biased value; this happens
4620 for subtypes of biased types. */
4621 if (TYPE_BIASED_REPRESENTATION_P (type))
27becfc8 4622 return fold_convert (type,
4623 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4624 convert (TREE_TYPE (type), expr),
6fa4bf38 4625 convert (TREE_TYPE (type),
4626 TYPE_MIN_VALUE (type))));
27becfc8 4627
b7066486 4628 /* ... fall through ... */
27becfc8 4629
4630 case ENUMERAL_TYPE:
69c2baa9 4631 case BOOLEAN_TYPE:
27becfc8 4632 /* If we are converting an additive expression to an integer type
4633 with lower precision, be wary of the optimization that can be
4634 applied by convert_to_integer. There are 2 problematic cases:
4635 - if the first operand was originally of a biased type,
4636 because we could be recursively called to convert it
4637 to an intermediate type and thus rematerialize the
4638 additive operator endlessly,
4639 - if the expression contains a placeholder, because an
4640 intermediate conversion that changes the sign could
4641 be inserted and thus introduce an artificial overflow
4642 at compile time when the placeholder is substituted. */
4643 if (code == INTEGER_TYPE
4644 && ecode == INTEGER_TYPE
4645 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4646 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4647 {
4648 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4649
4650 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4651 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4652 || CONTAINS_PLACEHOLDER_P (expr))
4653 return build1 (NOP_EXPR, type, expr);
4654 }
4655
4656 return fold (convert_to_integer (type, expr));
4657
4658 case POINTER_TYPE:
4659 case REFERENCE_TYPE:
7b4b0e11 4660 /* If converting between two thin pointers, adjust if needed to account
dd0cd1e4 4661 for differing offsets from the base pointer, depending on whether
4662 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
a98f6bec 4663 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
27becfc8 4664 {
dd0cd1e4 4665 tree etype_pos
ea780bd9 4666 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
dd0cd1e4 4667 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4668 : size_zero_node;
4669 tree type_pos
ea780bd9 4670 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
dd0cd1e4 4671 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4672 : size_zero_node;
4673 tree byte_diff = size_diffop (type_pos, etype_pos);
7b4b0e11 4674
27becfc8 4675 expr = build1 (NOP_EXPR, type, expr);
27becfc8 4676 if (integer_zerop (byte_diff))
4677 return expr;
4678
4679 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
7b4b0e11 4680 fold_convert (sizetype, byte_diff));
27becfc8 4681 }
4682
7b4b0e11 4683 /* If converting fat pointer to normal or thin pointer, get the pointer
4684 to the array and then convert it. */
4685 if (TYPE_IS_FAT_POINTER_P (etype))
c860752a 4686 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
27becfc8 4687
4688 return fold (convert_to_pointer (type, expr));
4689
4690 case REAL_TYPE:
4691 return fold (convert_to_real (type, expr));
4692
4693 case RECORD_TYPE:
4694 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
5cd86571 4695 {
f1f41a6c 4696 vec<constructor_elt, va_gc> *v;
4697 vec_alloc (v, 1);
5cd86571 4698
4699 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4700 convert (TREE_TYPE (TYPE_FIELDS (type)),
4701 expr));
4702 return gnat_build_constructor (type, v);
4703 }
27becfc8 4704
b7066486 4705 /* ... fall through ... */
27becfc8 4706
4707 case ARRAY_TYPE:
4708 /* In these cases, assume the front-end has validated the conversion.
4709 If the conversion is valid, it will be a bit-wise conversion, so
4710 it can be viewed as an unchecked conversion. */
4711 return unchecked_convert (type, expr, false);
4712
4713 case UNION_TYPE:
 4714 /* This is either a conversion between a tagged type and some
 4715 subtype, which we have to mark as a UNION_TYPE because of
 4716 overlapping fields, or a conversion of an Unchecked_Union. */
4717 return unchecked_convert (type, expr, false);
4718
4719 case UNCONSTRAINED_ARRAY_TYPE:
52dd2567 4720 /* If the input is a VECTOR_TYPE, convert to the representative
4721 array type first. */
4722 if (ecode == VECTOR_TYPE)
4723 {
4724 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4725 etype = TREE_TYPE (expr);
4726 ecode = TREE_CODE (etype);
4727 }
4728
27becfc8 4729 /* If EXPR is a constrained array, take its address, convert it to a
4730 fat pointer, and then dereference it. Likewise if EXPR is a
4731 record containing both a template and a constrained array.
4732 Note that a record representing a justified modular type
4733 always represents a packed constrained array. */
4734 if (ecode == ARRAY_TYPE
4735 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4736 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4737 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4738 return
4739 build_unary_op
4740 (INDIRECT_REF, NULL_TREE,
4741 convert_to_fat_pointer (TREE_TYPE (type),
4742 build_unary_op (ADDR_EXPR,
4743 NULL_TREE, expr)));
4744
4745 /* Do something very similar for converting one unconstrained
4746 array to another. */
4747 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4748 return
4749 build_unary_op (INDIRECT_REF, NULL_TREE,
4750 convert (TREE_TYPE (type),
4751 build_unary_op (ADDR_EXPR,
4752 NULL_TREE, expr)));
4753 else
4754 gcc_unreachable ();
4755
4756 case COMPLEX_TYPE:
4757 return fold (convert_to_complex (type, expr));
4758
4759 default:
4760 gcc_unreachable ();
4761 }
4762}
1d957068 4763
4764/* Create an expression whose value is that of EXPR converted to the common
4765 index type, which is sizetype. EXPR is supposed to be in the base type
4766 of the GNAT index type. Calling it is equivalent to doing
4767
4768 convert (sizetype, expr)
4769
4770 but we try to distribute the type conversion with the knowledge that EXPR
4771 cannot overflow in its type. This is a best-effort approach and we fall
4772 back to the above expression as soon as difficulties are encountered.
4773
4774 This is necessary to overcome issues that arise when the GNAT base index
4775 type and the GCC common index type (sizetype) don't have the same size,
4776 which is quite frequent on 64-bit architectures. In this case, and if
4777 the GNAT base index type is signed but the iteration type of the loop has
4778 been forced to unsigned, the loop scalar evolution engine cannot compute
4779 a simple evolution for the general induction variables associated with the
4780 array indices, because it will preserve the wrap-around semantics in the
4781 unsigned type of their "inner" part. As a result, many loop optimizations
4782 are blocked.
4783
4784 The solution is to use a special (basic) induction variable that is at
4785 least as large as sizetype, and to express the aforementioned general
4786 induction variables in terms of this induction variable, eliminating
4787 the problematic intermediate truncation to the GNAT base index type.
4788 This is possible as long as the original expression doesn't overflow
4789 and if the middle-end hasn't introduced artificial overflows in the
 4790 course of the various simplifications it can make to the expression. */
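/* For instance, given a loop parameter J and the index expression J + 1
   computed in the GNAT base index type, the function rebuilds it as the sum,
   computed directly in sizetype, of the special induction variable associated
   with J and of the constant 1, instead of converting the whole addition
   after the fact.  */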
4791
4792tree
4793convert_to_index_type (tree expr)
4794{
4795 enum tree_code code = TREE_CODE (expr);
4796 tree type = TREE_TYPE (expr);
4797
4798 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4799 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4800 if (TYPE_UNSIGNED (type) || !optimize)
4801 return convert (sizetype, expr);
4802
4803 switch (code)
4804 {
4805 case VAR_DECL:
4806 /* The main effect of the function: replace a loop parameter with its
4807 associated special induction variable. */
4808 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4809 expr = DECL_INDUCTION_VAR (expr);
4810 break;
4811
4812 CASE_CONVERT:
4813 {
4814 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4815 /* Bail out as soon as we suspect some sort of type frobbing. */
4816 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4817 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4818 break;
4819 }
4820
b7066486 4821 /* ... fall through ... */
1d957068 4822
4823 case NON_LVALUE_EXPR:
4824 return fold_build1 (code, sizetype,
4825 convert_to_index_type (TREE_OPERAND (expr, 0)));
4826
4827 case PLUS_EXPR:
4828 case MINUS_EXPR:
4829 case MULT_EXPR:
4830 return fold_build2 (code, sizetype,
4831 convert_to_index_type (TREE_OPERAND (expr, 0)),
4832 convert_to_index_type (TREE_OPERAND (expr, 1)));
4833
4834 case COMPOUND_EXPR:
4835 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4836 convert_to_index_type (TREE_OPERAND (expr, 1)));
4837
4838 case COND_EXPR:
4839 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4840 convert_to_index_type (TREE_OPERAND (expr, 1)),
4841 convert_to_index_type (TREE_OPERAND (expr, 2)));
4842
4843 default:
4844 break;
4845 }
4846
4847 return convert (sizetype, expr);
4848}
27becfc8 4849\f
4850/* Remove all conversions that are done in EXP. This includes converting
4851 from a padded type or to a justified modular type. If TRUE_ADDRESS
4852 is true, always return the address of the containing object even if
4853 the address is not bit-aligned. */
4854
4855tree
4856remove_conversions (tree exp, bool true_address)
4857{
4858 switch (TREE_CODE (exp))
4859 {
4860 case CONSTRUCTOR:
4861 if (true_address
4862 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4863 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4864 return
cdbc31ab 4865 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
27becfc8 4866 break;
4867
4868 case COMPONENT_REF:
a98f6bec 4869 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
27becfc8 4870 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4871 break;
4872
27becfc8 4873 CASE_CONVERT:
7ad4f11d 4874 case VIEW_CONVERT_EXPR:
4875 case NON_LVALUE_EXPR:
27becfc8 4876 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4877
4878 default:
4879 break;
4880 }
4881
4882 return exp;
4883}
4884\f
4885/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
22582d86 4886 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
27becfc8 4887 likewise return an expression pointing to the underlying array. */
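/* Note that NULL_TREE may be returned in the latter case if the input is a
   CONSTRUCTOR whose array part is a box; the caller in convert above guards
   against this before using the result.  */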
4888
4889tree
4890maybe_unconstrained_array (tree exp)
4891{
4892 enum tree_code code = TREE_CODE (exp);
e69771d4 4893 tree type = TREE_TYPE (exp);
27becfc8 4894
e69771d4 4895 switch (TREE_CODE (type))
27becfc8 4896 {
4897 case UNCONSTRAINED_ARRAY_TYPE:
4898 if (code == UNCONSTRAINED_ARRAY_REF)
4899 {
fd8d2914 4900 const bool read_only = TREE_READONLY (exp);
0b1f7790 4901 const bool no_trap = TREE_THIS_NOTRAP (exp);
4902
fd8d2914 4903 exp = TREE_OPERAND (exp, 0);
e69771d4 4904 type = TREE_TYPE (exp);
4905
fd8d2914 4906 if (TREE_CODE (exp) == COND_EXPR)
4907 {
4908 tree op1
4909 = build_unary_op (INDIRECT_REF, NULL_TREE,
4910 build_component_ref (TREE_OPERAND (exp, 1),
e69771d4 4911 TYPE_FIELDS (type),
fd8d2914 4912 false));
4913 tree op2
4914 = build_unary_op (INDIRECT_REF, NULL_TREE,
4915 build_component_ref (TREE_OPERAND (exp, 2),
e69771d4 4916 TYPE_FIELDS (type),
fd8d2914 4917 false));
4918
4919 exp = build3 (COND_EXPR,
e69771d4 4920 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
fd8d2914 4921 TREE_OPERAND (exp, 0), op1, op2);
4922 }
4923 else
0b1f7790 4924 {
4925 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
c860752a 4926 build_component_ref (exp,
4927 TYPE_FIELDS (type),
0b1f7790 4928 false));
4929 TREE_READONLY (exp) = read_only;
4930 TREE_THIS_NOTRAP (exp) = no_trap;
4931 }
27becfc8 4932 }
4933
4934 else if (code == NULL_EXPR)
e69771d4 4935 exp = build1 (NULL_EXPR,
4936 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4937 TREE_OPERAND (exp, 0));
4938 break;
27becfc8 4939
4940 case RECORD_TYPE:
e69771d4 4941 /* If this is a padded type and it contains a template, convert to the
4942 unpadded type first. */
4943 if (TYPE_PADDING_P (type)
4944 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4945 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
27becfc8 4946 {
e69771d4 4947 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
c860752a 4948 code = TREE_CODE (exp);
e69771d4 4949 type = TREE_TYPE (exp);
4950 }
4951
4952 if (TYPE_CONTAINS_TEMPLATE_P (type))
4953 {
c860752a 4954 /* If the array initializer is a box, return NULL_TREE. */
4955 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
4956 return NULL_TREE;
4957
4958 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4959 false);
4960 type = TREE_TYPE (exp);
e69771d4 4961
4962 /* If the array type is padded, convert to the unpadded type. */
c860752a 4963 if (TYPE_IS_PADDING_P (type))
4964 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
27becfc8 4965 }
27becfc8 4966 break;
4967
4968 default:
4969 break;
4970 }
4971
4972 return exp;
4973}
4974\f
4d00c918 4975/* Return true if EXPR is an expression that can be folded as an operand
a9538d68 4976 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4d00c918 4977
4978static bool
4979can_fold_for_view_convert_p (tree expr)
4980{
4981 tree t1, t2;
4982
4983 /* The folder will fold NOP_EXPRs between integral types with the same
4984 precision (in the middle-end's sense). We cannot allow it if the
4985 types don't have the same precision in the Ada sense as well. */
4986 if (TREE_CODE (expr) != NOP_EXPR)
4987 return true;
4988
4989 t1 = TREE_TYPE (expr);
4990 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4991
4992 /* Defer to the folder for non-integral conversions. */
4993 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4994 return true;
4995
4996 /* Only fold conversions that preserve both precisions. */
4997 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4998 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4999 return true;
5000
5001 return false;
5002}
5003
27becfc8 5004/* Return an expression that does an unchecked conversion of EXPR to TYPE.
4d00c918 5005 If NOTRUNC_P is true, truncation operations should be suppressed.
5006
5007 Special care is required with (source or target) integral types whose
5008 precision is not equal to their size, to make sure we fetch or assign
5009 the value bits whose location might depend on the endianness, e.g.
5010
5011 Rmsize : constant := 8;
5012 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5013
5014 type Bit_Array is array (1 .. Rmsize) of Boolean;
5015 pragma Pack (Bit_Array);
5016
5017 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5018
5019 Value : Int := 2#1000_0001#;
5020 Vbits : Bit_Array := To_Bit_Array (Value);
5021
5022 we expect the 8 bits at Vbits'Address to always contain Value, while
5023 their original location depends on the endianness, at Value'Address
a84cc613 5024 on a little-endian architecture but not on a big-endian one.
5025
5026 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5027 the bits between the precision and the size are filled, because of the
5028 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5029 So we use the special predicate type_unsigned_for_rm above. */
27becfc8 5030
5031tree
5032unchecked_convert (tree type, tree expr, bool notrunc_p)
5033{
5034 tree etype = TREE_TYPE (expr);
2b161576 5035 enum tree_code ecode = TREE_CODE (etype);
5036 enum tree_code code = TREE_CODE (type);
fc07fe6f 5037 tree tem;
2ca11b4d 5038 int c;
27becfc8 5039
2b161576 5040 /* If the expression is already of the right type, we are done. */
27becfc8 5041 if (etype == type)
5042 return expr;
5043
47ae02b7 5044 /* If both types are integral, just do a normal conversion.
27becfc8 5045 Likewise for a conversion to an unconstrained array. */
6fa4bf38 5046 if (((INTEGRAL_TYPE_P (type)
7b4b0e11 5047 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
2b161576 5048 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
6fa4bf38 5049 && (INTEGRAL_TYPE_P (etype)
a98f6bec 5050 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
2b161576 5051 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5052 || code == UNCONSTRAINED_ARRAY_TYPE)
27becfc8 5053 {
2b161576 5054 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
27becfc8 5055 {
5056 tree ntype = copy_type (etype);
27becfc8 5057 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5058 TYPE_MAIN_VARIANT (ntype) = ntype;
5059 expr = build1 (NOP_EXPR, ntype, expr);
5060 }
5061
2b161576 5062 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
27becfc8 5063 {
4d00c918 5064 tree rtype = copy_type (type);
27becfc8 5065 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5066 TYPE_MAIN_VARIANT (rtype) = rtype;
4d00c918 5067 expr = convert (rtype, expr);
5068 expr = build1 (NOP_EXPR, type, expr);
27becfc8 5069 }
4d00c918 5070 else
5071 expr = convert (type, expr);
27becfc8 5072 }
5073
4d00c918 5074 /* If we are converting to an integral type whose precision is not equal
292237f3 5075 to its size, first unchecked convert to a record type that contains a
5076 field of the given precision. Then extract the result from the field.
5077
5078 There is a subtlety if the source type is an aggregate type with reverse
5079 storage order because its representation is not contiguous in the native
5080 storage order, i.e. a direct unchecked conversion to an integral type
5081 with N bits of precision cannot read the first N bits of the aggregate
5082 type. To overcome it, we do an unchecked conversion to an integral type
5083 with reverse storage order and return the resulting value. This also
5084 ensures that the result of the unchecked conversion doesn't depend on
5085 the endianness of the target machine, but only on the storage order of
5086 the aggregate type.
5087
5088 Finally, for the sake of consistency, we do the unchecked conversion
5089 to an integral type with reverse storage order as soon as the source
5090 type is an aggregate type with reverse storage order, even if there
5091 are no considerations of precision or size involved. */
2ca11b4d 5092 else if (INTEGRAL_TYPE_P (type)
5093 && TYPE_RM_SIZE (type)
0353d27b 5094 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
5095 TYPE_SIZE (type)) < 0
292237f3 5096 || (AGGREGATE_TYPE_P (etype)
5097 && TYPE_REVERSE_STORAGE_ORDER (etype))))
27becfc8 5098 {
5099 tree rec_type = make_node (RECORD_TYPE);
f9ae6f95 5100 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
17ac487b 5101 tree field_type, field;
5102
292237f3 5103 if (AGGREGATE_TYPE_P (etype))
5104 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5105 = TYPE_REVERSE_STORAGE_ORDER (etype);
5106
a84cc613 5107 if (type_unsigned_for_rm (type))
17ac487b 5108 field_type = make_unsigned_type (prec);
5109 else
5110 field_type = make_signed_type (prec);
5111 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5112
5113 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
62092ba6 5114 NULL_TREE, bitsize_zero_node, 1, 0);
27becfc8 5115
62092ba6 5116 finish_record_type (rec_type, field, 1, false);
27becfc8 5117
5118 expr = unchecked_convert (rec_type, expr, notrunc_p);
c860752a 5119 expr = build_component_ref (expr, field, false);
17ac487b 5120 expr = fold_build1 (NOP_EXPR, type, expr);
27becfc8 5121 }
5122
17ac487b 5123 /* Similarly if we are converting from an integral type whose precision is
5124 not equal to its size, first copy into a field of the given precision
292237f3 5125 and unchecked convert the record type.
5126
5127 The same considerations as above apply if the target type is an aggregate
5128 type with reverse storage order and we also proceed similarly. */
2ca11b4d 5129 else if (INTEGRAL_TYPE_P (etype)
5130 && TYPE_RM_SIZE (etype)
0353d27b 5131 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5132 TYPE_SIZE (etype)) < 0
292237f3 5133 || (AGGREGATE_TYPE_P (type)
5134 && TYPE_REVERSE_STORAGE_ORDER (type))))
27becfc8 5135 {
5136 tree rec_type = make_node (RECORD_TYPE);
f9ae6f95 5137 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
f1f41a6c 5138 vec<constructor_elt, va_gc> *v;
5139 vec_alloc (v, 1);
17ac487b 5140 tree field_type, field;
5141
292237f3 5142 if (AGGREGATE_TYPE_P (type))
5143 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5144 = TYPE_REVERSE_STORAGE_ORDER (type);
5145
a84cc613 5146 if (type_unsigned_for_rm (etype))
17ac487b 5147 field_type = make_unsigned_type (prec);
5148 else
5149 field_type = make_signed_type (prec);
5150 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5151
5152 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
62092ba6 5153 NULL_TREE, bitsize_zero_node, 1, 0);
27becfc8 5154
62092ba6 5155 finish_record_type (rec_type, field, 1, false);
27becfc8 5156
17ac487b 5157 expr = fold_build1 (NOP_EXPR, field_type, expr);
5cd86571 5158 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5159 expr = gnat_build_constructor (rec_type, v);
27becfc8 5160 expr = unchecked_convert (type, expr, notrunc_p);
5161 }
5162
2ca11b4d 5163 /* If we are converting from a scalar type to a type with a different size,
5164 we need to pad to have the same size on both sides.
5165
5166 ??? We cannot do it unconditionally because unchecked conversions are
5167 used liberally by the front-end to implement polymorphism, e.g. in:
5168
5169 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5170 return p___size__4 (p__object!(S191s.all));
5171
5172 so we skip all expressions that are references. */
5173 else if (!REFERENCE_CLASS_P (expr)
5174 && !AGGREGATE_TYPE_P (etype)
5175 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5176 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5177 {
5178 if (c < 0)
5179 {
5180 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5181 false, false, false, true),
5182 expr);
5183 expr = unchecked_convert (type, expr, notrunc_p);
5184 }
5185 else
5186 {
5187 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5188 false, false, false, true);
5189 expr = unchecked_convert (rec_type, expr, notrunc_p);
c860752a 5190 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
2ca11b4d 5191 }
5192 }
5193
52dd2567 5194 /* We have a special case when we are converting between two unconstrained
5195 array types. In that case, take the address, convert the fat pointer
5196 types, and dereference. */
2b161576 5197 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
27becfc8 5198 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5199 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5200 build_unary_op (ADDR_EXPR, NULL_TREE,
5201 expr)));
52dd2567 5202
5203 /* Another special case is when we are converting to a vector type from its
 5204 representative array type; this is a regular conversion. */
2b161576 5205 else if (code == VECTOR_TYPE
5206 && ecode == ARRAY_TYPE
52dd2567 5207 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5208 etype))
5209 expr = convert (type, expr);
5210
fc07fe6f 5211 /* And, if the array type is not the representative, we try to build an
5212 intermediate vector type of which the array type is the representative
5213 and to do the unchecked conversion between the vector types, in order
5214 to enable further simplifications in the middle-end. */
5215 else if (code == VECTOR_TYPE
5216 && ecode == ARRAY_TYPE
5217 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5218 {
5219 expr = convert (tem, expr);
5220 return unchecked_convert (type, expr, notrunc_p);
5221 }
5222
62092ba6 5223 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5224 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5225 else if (TREE_CODE (expr) == CONSTRUCTOR
5226 && code == RECORD_TYPE
5227 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5228 {
5229 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5230 Empty, false, false, false, true),
5231 expr);
5232 return unchecked_convert (type, expr, notrunc_p);
5233 }
5234
5235 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
27becfc8 5236 else
5237 {
5238 expr = maybe_unconstrained_array (expr);
5239 etype = TREE_TYPE (expr);
2b161576 5240 ecode = TREE_CODE (etype);
4d00c918 5241 if (can_fold_for_view_convert_p (expr))
5242 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5243 else
5244 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
27becfc8 5245 }
5246
a84cc613 5247 /* If the result is a non-biased integral type whose precision is not equal
5248 to its size, sign- or zero-extend the result. But we need not do this
5249 if the input is also an integral type and both are unsigned or both are
5250 signed and have the same precision. */
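  /* The extension is done with a pair of shifts in a type of full size: the
     value is first shifted left so that its RM-sized part lands in the most
     significant bits, then shifted right by the same amount, the signedness
     of the base type deciding between an arithmetic and a logical shift.  */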
27becfc8 5251 if (!notrunc_p
0353d27b 5252 && INTEGRAL_TYPE_P (type)
a84cc613 5253 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
0353d27b 5254 && TYPE_RM_SIZE (type)
5255 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
27becfc8 5256 && !(INTEGRAL_TYPE_P (etype)
a84cc613 5257 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5258 && (type_unsigned_for_rm (type)
5259 || tree_int_cst_compare (TYPE_RM_SIZE (type),
5260 TYPE_RM_SIZE (etype)
5261 ? TYPE_RM_SIZE (etype)
5262 : TYPE_SIZE (etype)) == 0)))
27becfc8 5263 {
2b161576 5264 tree base_type
0353d27b 5265 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
a84cc613 5266 type_unsigned_for_rm (type));
27becfc8 5267 tree shift_expr
5268 = convert (base_type,
5269 size_binop (MINUS_EXPR,
0353d27b 5270 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
27becfc8 5271 expr
5272 = convert (type,
5273 build_binary_op (RSHIFT_EXPR, base_type,
5274 build_binary_op (LSHIFT_EXPR, base_type,
5275 convert (base_type, expr),
5276 shift_expr),
5277 shift_expr));
5278 }
5279
5280 /* An unchecked conversion should never raise Constraint_Error. The code
5281 below assumes that GCC's conversion routines overflow the same way that
5282 the underlying hardware does. This is probably true. In the rare case
5283 when it is false, we can rely on the fact that such conversions are
5284 erroneous anyway. */
5285 if (TREE_CODE (expr) == INTEGER_CST)
5286 TREE_OVERFLOW (expr) = 0;
5287
 5288 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
 5289 mark the expression as no longer constant. */
5290 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5291 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5292 OEP_ONLY_CONST))
5293 TREE_CONSTANT (expr) = 0;
5294
5295 return expr;
5296}
5297\f
4880a940 5298/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
27becfc8 5299 the latter being a record type as predicated by Is_Record_Type. */
5300
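/* For instance, with a declaration along these lines:

     type Rec (Flag : Boolean := False) is record
       case Flag is
         when False => I : Integer;
         when True  => F : Float;
       end case;
     end record;
     pragma Unchecked_Union (Rec);

   all the non-discriminant fields are in the variant part, so UNION_TYPE is
   returned; a component declared before the variant part would yield
   RECORD_TYPE instead.  */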
5301enum tree_code
5302tree_code_for_record_type (Entity_Id gnat_type)
5303{
68e668ce 5304 Node_Id component_list, component;
27becfc8 5305
68e668ce 5306 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5307 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
27becfc8 5308 if (!Is_Unchecked_Union (gnat_type))
5309 return RECORD_TYPE;
5310
68e668ce 5311 gnat_type = Implementation_Base_Type (gnat_type);
5312 component_list
5313 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5314
27becfc8 5315 for (component = First_Non_Pragma (Component_Items (component_list));
5316 Present (component);
5317 component = Next_Non_Pragma (component))
5318 if (Ekind (Defining_Entity (component)) == E_Component)
5319 return RECORD_TYPE;
5320
5321 return UNION_TYPE;
5322}
5323
d4b7e0f5 5324/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5325 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5326 according to the presence of an alignment clause on the type or, if it
5327 is an array, on the component type. */
5328
5329bool
5330is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5331{
5332 gnat_type = Underlying_Type (gnat_type);
5333
5334 *align_clause = Present (Alignment_Clause (gnat_type));
5335
5336 if (Is_Array_Type (gnat_type))
5337 {
5338 gnat_type = Underlying_Type (Component_Type (gnat_type));
5339 if (Present (Alignment_Clause (gnat_type)))
5340 *align_clause = true;
5341 }
5342
5343 if (!Is_Floating_Point_Type (gnat_type))
5344 return false;
5345
5346 if (UI_To_Int (Esize (gnat_type)) != 64)
5347 return false;
5348
5349 return true;
5350}
5351
5352/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5353 size is greater or equal to 64 bits, or an array of such a type. Set
5354 ALIGN_CLAUSE according to the presence of an alignment clause on the
5355 type or, if it is an array, on the component type. */
5356
5357bool
5358is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5359{
5360 gnat_type = Underlying_Type (gnat_type);
5361
5362 *align_clause = Present (Alignment_Clause (gnat_type));
5363
5364 if (Is_Array_Type (gnat_type))
5365 {
5366 gnat_type = Underlying_Type (Component_Type (gnat_type));
5367 if (Present (Alignment_Clause (gnat_type)))
5368 *align_clause = true;
5369 }
5370
5371 if (!Is_Scalar_Type (gnat_type))
5372 return false;
5373
5374 if (UI_To_Int (Esize (gnat_type)) < 64)
5375 return false;
5376
5377 return true;
5378}
5379
27becfc8 5380/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5381 component of an aggregate type. */
5382
5383bool
5384type_for_nonaliased_component_p (tree gnu_type)
5385{
5386 /* If the type is passed by reference, we may have pointers to the
5387 component so it cannot be made non-aliased. */
5388 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5389 return false;
5390
5391 /* We used to say that any component of aggregate type is aliased
5392 because the front-end may take 'Reference of it. The front-end
5393 has been enhanced in the meantime so as to use a renaming instead
5394 in most cases, but the back-end can probably take the address of
5395 such a component too so we go for the conservative stance.
5396
5397 For instance, we might need the address of any array type, even
5398 if normally passed by copy, to construct a fat pointer if the
5399 component is used as an actual for an unconstrained formal.
5400
5401 Likewise for record types: even if a specific record subtype is
5402 passed by copy, the parent type might be passed by ref (e.g. if
5403 it's of variable size) and we might take the address of a child
5404 component to pass to a parent formal. We have no way to check
5405 for such conditions here. */
5406 if (AGGREGATE_TYPE_P (gnu_type))
5407 return false;
5408
5409 return true;
5410}
5411
a88c8608 5412/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5413
5414bool
5415smaller_form_type_p (tree type, tree orig_type)
5416{
5417 tree size, osize;
5418
5419 /* We're not interested in variants here. */
5420 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5421 return false;
5422
5423 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5424 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5425 return false;
5426
5427 size = TYPE_SIZE (type);
5428 osize = TYPE_SIZE (orig_type);
5429
5430 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5431 return false;
5432
5433 return tree_int_cst_lt (size, osize) != 0;
5434}
5435
0dfdb37a 5436/* Return whether EXPR, which is the renamed object in an object renaming
5437 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5438 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5439
5440bool
5441can_materialize_object_renaming_p (Node_Id expr)
5442{
5443 while (true)
5444 {
7f7cc984 5445 expr = Original_Node (expr);
5446
0dfdb37a 5447 switch (Nkind (expr))
5448 {
5449 case N_Identifier:
5450 case N_Expanded_Name:
7f7cc984 5451 if (!Present (Renamed_Object (Entity (expr))))
5452 return true;
5453 expr = Renamed_Object (Entity (expr));
5454 break;
0dfdb37a 5455
5456 case N_Selected_Component:
5457 {
5458 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5459 return false;
5460
5461 const Uint bitpos
5462 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5463 if (!UI_Is_In_Int_Range (bitpos)
5464 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5465 return false;
5466
5467 expr = Prefix (expr);
5468 break;
5469 }
5470
5471 case N_Indexed_Component:
5472 case N_Slice:
5473 {
5474 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5475
5476 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5477 return false;
5478
5479 expr = Prefix (expr);
5480 break;
5481 }
5482
5483 case N_Explicit_Dereference:
5484 expr = Prefix (expr);
5485 break;
5486
5487 default:
5488 return true;
5489 };
5490 }
5491}
5492
0c6fd2e5 5493/* Perform final processing on global declarations. */
3a1c9df2 5494
2df77065 5495static GTY (()) tree dummy_global;
5496
27becfc8 5497void
0c6fd2e5 5498gnat_write_global_declarations (void)
27becfc8 5499{
60388043 5500 unsigned int i;
5501 tree iter;
5502
86bfd6f3 5503 /* If we have declared types as used at the global level, insert them in
0617d4e0 5504 the global hash table. We use a dummy variable for this purpose, but
5505 we need to build it unconditionally to avoid -fcompare-debug issues. */
5506 if (first_global_object_name)
86bfd6f3 5507 {
a2862f76 5508 struct varpool_node *node;
a7688838 5509 char *label;
5510
5511 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
86bfd6f3 5512 dummy_global
a7688838 5513 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5514 void_type_node);
861daf9d 5515 DECL_HARD_REGISTER (dummy_global) = 1;
86bfd6f3 5516 TREE_STATIC (dummy_global) = 1;
727d4825 5517 node = varpool_node::get_create (dummy_global);
861daf9d 5518 node->definition = 1;
02774f2d 5519 node->force_output = 1;
86bfd6f3 5520
0617d4e0 5521 if (types_used_by_cur_var_decl)
5522 while (!types_used_by_cur_var_decl->is_empty ())
5523 {
5524 tree t = types_used_by_cur_var_decl->pop ();
5525 types_used_by_var_decl_insert (t, dummy_global);
5526 }
86bfd6f3 5527 }
5528
0c6fd2e5 5529 /* Output debug information for all global type declarations first. This
5530 ensures that global types whose compilation hasn't been finalized yet,
5531 for example pointers to Taft amendment types, have their compilation
5532 finalized in the right context. */
5533 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5534 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
a11ea431 5535 debug_hooks->type_decl (iter, false);
0c6fd2e5 5536
288405ec 5537 /* Output imported functions. */
5538 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5539 if (TREE_CODE (iter) == FUNCTION_DECL
5540 && DECL_EXTERNAL (iter)
5541 && DECL_INITIAL (iter) == NULL
5542 && !DECL_IGNORED_P (iter)
5543 && DECL_FUNCTION_IS_DEF (iter))
5544 debug_hooks->early_global_decl (iter);
5545
0c6fd2e5 5546 /* Then output the global variables. We need to do that after the debug
288405ec 5547 information for global types is emitted so that they are finalized. Skip
5548 external global variables, unless we need to emit debug info for them:
5549 this is useful for imported variables, for instance. */
0c6fd2e5 5550 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
288405ec 5551 if (TREE_CODE (iter) == VAR_DECL
5552 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
0c6fd2e5 5553 rest_of_decl_compilation (iter, true, 0);
1ef426cd 5554
 5555 /* Output the imported modules/declarations. In GNAT, these only
 5556 materialize subprograms. */
5557 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5558 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5559 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
41d01e67 5560 DECL_CONTEXT (iter), false, false);
27becfc8 5561}
5562
5563/* ************************************************************************
5564 * * GCC builtins support *
5565 * ************************************************************************ */
5566
5567/* The general scheme is fairly simple:
5568
5569 For each builtin function/type to be declared, gnat_install_builtins calls
a10d3a24 5570 internal facilities which eventually get to gnat_pushdecl, which in turn
27becfc8 5571 tracks the so declared builtin function decls in the 'builtin_decls' global
5572 datastructure. When an Intrinsic subprogram declaration is processed, we
5573 search this global datastructure to retrieve the associated BUILT_IN DECL
5574 node. */
5575
5576/* Search the chain of currently available builtin declarations for a node
5577 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5578 found, if any, or NULL_TREE otherwise. */
5579tree
5580builtin_decl_for (tree name)
5581{
5582 unsigned i;
5583 tree decl;
5584
f1f41a6c 5585 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
27becfc8 5586 if (DECL_NAME (decl) == name)
5587 return decl;
5588
5589 return NULL_TREE;
5590}
5591
5592/* The code below eventually exposes gnat_install_builtins, which declares
5593 the builtin types and functions we might need, either internally or as
5594 user accessible facilities.
5595
5596 ??? This is a first implementation shot, still in rough shape. It is
5597 heavily inspired from the "C" family implementation, with chunks copied
5598 verbatim from there.
5599
9ac7350e 5600 Two obvious improvement candidates are:
27becfc8 5601 o Use a more efficient name/decl mapping scheme
5602 o Devise a middle-end infrastructure to avoid having to copy
5603 pieces between front-ends. */
5604
5605/* ----------------------------------------------------------------------- *
5606 * BUILTIN ELEMENTARY TYPES *
5607 * ----------------------------------------------------------------------- */
5608
5609/* Standard data types to be used in builtin argument declarations. */
5610
5611enum c_tree_index
5612{
5613 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5614 CTI_STRING_TYPE,
5615 CTI_CONST_STRING_TYPE,
5616
5617 CTI_MAX
5618};
5619
5620static tree c_global_trees[CTI_MAX];
5621
5622#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5623#define string_type_node c_global_trees[CTI_STRING_TYPE]
5624#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5625
 5626 /* ??? In addition to some attribute handlers, we currently don't support a
 5627 (small) number of builtin-types, which in turn inhibits support for a
5628 number of builtin functions. */
5629#define wint_type_node void_type_node
5630#define intmax_type_node void_type_node
5631#define uintmax_type_node void_type_node
5632
27becfc8 5633/* Used to help initialize the builtin-types.def table. When a type of
5634 the correct size doesn't exist, use error_mark_node instead of NULL.
 5635 The latter results in segfaults even when a decl using the type doesn't
5636 get invoked. */
5637
5638static tree
5639builtin_type_for_size (int size, bool unsignedp)
5640{
9bfb1138 5641 tree type = gnat_type_for_size (size, unsignedp);
27becfc8 5642 return type ? type : error_mark_node;
5643}
5644
5645/* Build/push the elementary type decls that builtin functions/types
5646 will need. */
5647
5648static void
5649install_builtin_elementary_types (void)
5650{
0353d27b 5651 signed_size_type_node = gnat_signed_type_for (size_type_node);
27becfc8 5652 pid_type_node = integer_type_node;
27becfc8 5653
5654 string_type_node = build_pointer_type (char_type_node);
5655 const_string_type_node
5656 = build_pointer_type (build_qualified_type
5657 (char_type_node, TYPE_QUAL_CONST));
5658}
5659
5660/* ----------------------------------------------------------------------- *
5661 * BUILTIN FUNCTION TYPES *
5662 * ----------------------------------------------------------------------- */
5663
5664/* Now, builtin function types per se. */
5665
5666enum c_builtin_type
5667{
5668#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5669#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5670#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5671#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5672#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5673#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5674#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
3c77ca67 5675#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5676 ARG6) NAME,
5677#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5678 ARG6, ARG7) NAME,
5679#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5680 ARG6, ARG7, ARG8) NAME,
43895be5 5681#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5682 ARG6, ARG7, ARG8, ARG9) NAME,
5683#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5684 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5685#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5686 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
27becfc8 5687#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5688#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5689#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5690#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5691#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
3c77ca67 5692#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6349b8cc 5693 NAME,
e561d5e1 5694#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5695 ARG6) NAME,
6349b8cc 5696#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5697 ARG6, ARG7) NAME,
27becfc8 5698#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5699#include "builtin-types.def"
5700#undef DEF_PRIMITIVE_TYPE
5701#undef DEF_FUNCTION_TYPE_0
5702#undef DEF_FUNCTION_TYPE_1
5703#undef DEF_FUNCTION_TYPE_2
5704#undef DEF_FUNCTION_TYPE_3
5705#undef DEF_FUNCTION_TYPE_4
5706#undef DEF_FUNCTION_TYPE_5
5707#undef DEF_FUNCTION_TYPE_6
5708#undef DEF_FUNCTION_TYPE_7
bc7bff74 5709#undef DEF_FUNCTION_TYPE_8
43895be5 5710#undef DEF_FUNCTION_TYPE_9
5711#undef DEF_FUNCTION_TYPE_10
5712#undef DEF_FUNCTION_TYPE_11
27becfc8 5713#undef DEF_FUNCTION_TYPE_VAR_0
5714#undef DEF_FUNCTION_TYPE_VAR_1
5715#undef DEF_FUNCTION_TYPE_VAR_2
5716#undef DEF_FUNCTION_TYPE_VAR_3
5717#undef DEF_FUNCTION_TYPE_VAR_4
5718#undef DEF_FUNCTION_TYPE_VAR_5
e561d5e1 5719#undef DEF_FUNCTION_TYPE_VAR_6
6349b8cc 5720#undef DEF_FUNCTION_TYPE_VAR_7
27becfc8 5721#undef DEF_POINTER_TYPE
5722 BT_LAST
5723};
5724
5725typedef enum c_builtin_type builtin_type;
5726
5727/* A temporary array used in communication with def_fn_type. */
5728static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5729
5730/* A helper function for install_builtin_types. Build function type
5731 for DEF with return type RET and N arguments. If VAR is true, then the
5732 function should be variadic after those N arguments.
5733
5734 Takes special care not to ICE if any of the types involved are
5735 error_mark_node, which indicates that said type is not in fact available
5736 (see builtin_type_for_size). In which case the function type as a whole
5737 should be error_mark_node. */
5738
5739static void
5740def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5741{
ce497e33 5742 tree t;
5743 tree *args = XALLOCAVEC (tree, n);
27becfc8 5744 va_list list;
5745 int i;
5746
5747 va_start (list, n);
5748 for (i = 0; i < n; ++i)
5749 {
c88e6a4f 5750 builtin_type a = (builtin_type) va_arg (list, int);
27becfc8 5751 t = builtin_types[a];
5752 if (t == error_mark_node)
5753 goto egress;
ce497e33 5754 args[i] = t;
27becfc8 5755 }
27becfc8 5756
27becfc8 5757 t = builtin_types[ret];
5758 if (t == error_mark_node)
5759 goto egress;
ce497e33 5760 if (var)
5761 t = build_varargs_function_type_array (t, n, args);
5762 else
5763 t = build_function_type_array (t, n, args);
27becfc8 5764
5765 egress:
5766 builtin_types[def] = t;
451c8e2f 5767 va_end (list);
27becfc8 5768}
5769
5770/* Build the builtin function types and install them in the builtin_types
5771 array for later use in builtin function decls. */
5772
5773static void
5774install_builtin_function_types (void)
5775{
5776 tree va_list_ref_type_node;
5777 tree va_list_arg_type_node;
5778
5779 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5780 {
5781 va_list_arg_type_node = va_list_ref_type_node =
5782 build_pointer_type (TREE_TYPE (va_list_type_node));
5783 }
5784 else
5785 {
5786 va_list_arg_type_node = va_list_type_node;
5787 va_list_ref_type_node = build_reference_type (va_list_type_node);
5788 }
5789
5790#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5791 builtin_types[ENUM] = VALUE;
5792#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5793 def_fn_type (ENUM, RETURN, 0, 0);
5794#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5795 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5796#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5797 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5798#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5799 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5800#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5801 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5802#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5803 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5804#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5805 ARG6) \
5806 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5807#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5808 ARG6, ARG7) \
5809 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
bc7bff74 5810#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5811 ARG6, ARG7, ARG8) \
5812 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5813 ARG7, ARG8);
43895be5 5814#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5815 ARG6, ARG7, ARG8, ARG9) \
5816 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5817 ARG7, ARG8, ARG9);
5818#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5819 ARG6, ARG7, ARG8, ARG9, ARG10) \
5820 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5821 ARG7, ARG8, ARG9, ARG10);
5822#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5823 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5824 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5825 ARG7, ARG8, ARG9, ARG10, ARG11);
27becfc8 5826#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5827 def_fn_type (ENUM, RETURN, 1, 0);
5828#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5829 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5830#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5831 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5832#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5833 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5834#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5835 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5836#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5837 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
e561d5e1 5838#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5839 ARG6) \
5840 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6349b8cc 5841#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5842 ARG6, ARG7) \
5843 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
27becfc8 5844#define DEF_POINTER_TYPE(ENUM, TYPE) \
5845 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5846
5847#include "builtin-types.def"
5848
5849#undef DEF_PRIMITIVE_TYPE
3c77ca67 5850#undef DEF_FUNCTION_TYPE_0
27becfc8 5851#undef DEF_FUNCTION_TYPE_1
5852#undef DEF_FUNCTION_TYPE_2
5853#undef DEF_FUNCTION_TYPE_3
5854#undef DEF_FUNCTION_TYPE_4
5855#undef DEF_FUNCTION_TYPE_5
5856#undef DEF_FUNCTION_TYPE_6
3c77ca67 5857#undef DEF_FUNCTION_TYPE_7
5858#undef DEF_FUNCTION_TYPE_8
43895be5 5859#undef DEF_FUNCTION_TYPE_9
5860#undef DEF_FUNCTION_TYPE_10
5861#undef DEF_FUNCTION_TYPE_11
27becfc8 5862#undef DEF_FUNCTION_TYPE_VAR_0
5863#undef DEF_FUNCTION_TYPE_VAR_1
5864#undef DEF_FUNCTION_TYPE_VAR_2
5865#undef DEF_FUNCTION_TYPE_VAR_3
5866#undef DEF_FUNCTION_TYPE_VAR_4
5867#undef DEF_FUNCTION_TYPE_VAR_5
e561d5e1 5868#undef DEF_FUNCTION_TYPE_VAR_6
6349b8cc 5869#undef DEF_FUNCTION_TYPE_VAR_7
27becfc8 5870#undef DEF_POINTER_TYPE
5871 builtin_types[(int) BT_LAST] = NULL_TREE;
5872}
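
/* Sketch of the expansion performed above, assuming builtin-types.def
   contains its usual entry

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   With the macros defined as above, including the file amounts to
   executing

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   which leaves the tree for "int (int)" in builtin_types[BT_FN_INT_INT],
   ready to be referenced by install_builtin_functions below.  */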
5873
5874/* ----------------------------------------------------------------------- *
5875 * BUILTIN ATTRIBUTES *
5876 * ----------------------------------------------------------------------- */
5877
5878enum built_in_attribute
5879{
5880#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5881#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
c8010b80 5882#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
27becfc8 5883#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5884#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5885#include "builtin-attrs.def"
5886#undef DEF_ATTR_NULL_TREE
5887#undef DEF_ATTR_INT
c8010b80 5888#undef DEF_ATTR_STRING
27becfc8 5889#undef DEF_ATTR_IDENT
5890#undef DEF_ATTR_TREE_LIST
5891 ATTR_LAST
5892};
5893
5894static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5895
5896static void
5897install_builtin_attributes (void)
5898{
5899 /* Fill in the built_in_attributes array. */
5900#define DEF_ATTR_NULL_TREE(ENUM) \
5901 built_in_attributes[(int) ENUM] = NULL_TREE;
5902#define DEF_ATTR_INT(ENUM, VALUE) \
5903 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
c8010b80 5904#define DEF_ATTR_STRING(ENUM, VALUE) \
5905 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
27becfc8 5906#define DEF_ATTR_IDENT(ENUM, STRING) \
5907 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5908#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5909 built_in_attributes[(int) ENUM] \
5910 = tree_cons (built_in_attributes[(int) PURPOSE], \
5911 built_in_attributes[(int) VALUE], \
5912 built_in_attributes[(int) CHAIN]);
5913#include "builtin-attrs.def"
5914#undef DEF_ATTR_NULL_TREE
5915#undef DEF_ATTR_INT
c8010b80 5916#undef DEF_ATTR_STRING
27becfc8 5917#undef DEF_ATTR_IDENT
5918#undef DEF_ATTR_TREE_LIST
5919}
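
/* For illustration, assuming builtin-attrs.def contains its usual entries

     DEF_ATTR_IDENT (ATTR_CONST, "const")
     DEF_ATTR_TREE_LIST (ATTR_CONST_NOTHROW_LIST, ATTR_CONST,
                         ATTR_NULL, ATTR_NOTHROW_LIST)

   the expansion above leaves in built_in_attributes[ATTR_CONST_NOTHROW_LIST]
   a TREE_LIST equivalent to the source-level attribute list
   "const, nothrow", ready to be attached to builtin decls by
   install_builtin_functions.  */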
5920
5921/* Handle a "const" attribute; arguments as in
5922 struct attribute_spec.handler. */
5923
5924static tree
5925handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5926 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5927 bool *no_add_attrs)
5928{
5929 if (TREE_CODE (*node) == FUNCTION_DECL)
5930 TREE_READONLY (*node) = 1;
5931 else
5932 *no_add_attrs = true;
5933
5934 return NULL_TREE;
5935}
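
/* At the C level the handler above corresponds to, e.g.

     extern int my_hash (int) __attribute__ ((const));

   which marks the FUNCTION_DECL TREE_READONLY, the tree-level encoding of
   a "const" function (no side effects, result depends only on the
   arguments).  Illustrative example only; my_hash is not a real decl.  */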
5936
5937/* Handle a "nothrow" attribute; arguments as in
5938 struct attribute_spec.handler. */
5939
5940static tree
5941handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5942 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5943 bool *no_add_attrs)
5944{
5945 if (TREE_CODE (*node) == FUNCTION_DECL)
5946 TREE_NOTHROW (*node) = 1;
5947 else
5948 *no_add_attrs = true;
5949
5950 return NULL_TREE;
5951}
5952
5953/* Handle a "pure" attribute; arguments as in
5954 struct attribute_spec.handler. */
5955
5956static tree
5957handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5958 int ARG_UNUSED (flags), bool *no_add_attrs)
5959{
5960 if (TREE_CODE (*node) == FUNCTION_DECL)
5961 DECL_PURE_P (*node) = 1;
9ac7350e 5962 /* TODO: support types. */
27becfc8 5963 else
5964 {
52dd2567 5965 warning (OPT_Wattributes, "%qs attribute ignored",
5966 IDENTIFIER_POINTER (name));
27becfc8 5967 *no_add_attrs = true;
5968 }
5969
5970 return NULL_TREE;
5971}
5972
5973/* Handle a "no vops" attribute; arguments as in
5974 struct attribute_spec.handler. */
5975
5976static tree
5977handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5978 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5979 bool *ARG_UNUSED (no_add_attrs))
5980{
5981 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5982 DECL_IS_NOVOPS (*node) = 1;
5983 return NULL_TREE;
5984}
5985
5986/* Helper for nonnull attribute handling; fetch the operand number
5987 from the attribute argument list. */
5988
5989static bool
5990get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5991{
5992 /* Verify the arg number is a constant. */
e1d65c9f 5993 if (!tree_fits_uhwi_p (arg_num_expr))
27becfc8 5994 return false;
5995
f9ae6f95 5996 *valp = TREE_INT_CST_LOW (arg_num_expr);
27becfc8 5997 return true;
5998}
5999
6000/* Handle the "nonnull" attribute. */
6001static tree
6002handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6003 tree args, int ARG_UNUSED (flags),
6004 bool *no_add_attrs)
6005{
6006 tree type = *node;
6007 unsigned HOST_WIDE_INT attr_arg_num;
6008
6009 /* If no arguments are specified, all pointer arguments should be
6010 non-null. Verify a full prototype is given so that the arguments
1dc4d519 6011 will have the correct types when we actually check them later.
6012 Avoid diagnosing type-generic built-ins since those have no
6013 prototype. */
27becfc8 6014 if (!args)
6015 {
1dc4d519 6016 if (!prototype_p (type)
6017 && (!TYPE_ATTRIBUTES (type)
6018 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
27becfc8 6019 {
6020 error ("nonnull attribute without arguments on a non-prototype");
6021 *no_add_attrs = true;
6022 }
6023 return NULL_TREE;
6024 }
6025
6026 /* Argument list specified. Verify that each argument number references
6027 a pointer argument. */
6028	  for (attr_arg_num = 1; args; args = TREE_CHAIN (args), attr_arg_num++)
6029 {
27becfc8 6030 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6031
6032 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6033 {
6034 error ("nonnull argument has invalid operand number (argument %lu)",
6035 (unsigned long) attr_arg_num);
6036 *no_add_attrs = true;
6037 return NULL_TREE;
6038 }
6039
3e33edbe 6040 if (prototype_p (type))
27becfc8 6041 {
3e33edbe 6042 function_args_iterator iter;
6043 tree argument;
6044
6045 function_args_iter_init (&iter, type);
6046 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
27becfc8 6047 {
3e33edbe 6048 argument = function_args_iter_cond (&iter);
27becfc8 6049 if (!argument || ck_num == arg_num)
6050 break;
27becfc8 6051 }
6052
6053 if (!argument
3e33edbe 6054 || TREE_CODE (argument) == VOID_TYPE)
27becfc8 6055 {
ac45dde2 6056 error ("nonnull argument with out-of-range operand number "
6057 "(argument %lu, operand %lu)",
27becfc8 6058 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6059 *no_add_attrs = true;
6060 return NULL_TREE;
6061 }
6062
3e33edbe 6063 if (TREE_CODE (argument) != POINTER_TYPE)
27becfc8 6064 {
ac45dde2 6065 error ("nonnull argument references non-pointer operand "
6066 "(argument %lu, operand %lu)",
27becfc8 6067 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6068 *no_add_attrs = true;
6069 return NULL_TREE;
6070 }
6071 }
6072 }
6073
6074 return NULL_TREE;
6075}
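
/* Illustrative C-level equivalents of the two forms handled above:

     extern void use1 (void *, void *) __attribute__ ((nonnull));
     extern void use2 (void *, int, void *) __attribute__ ((nonnull (1, 3)));

   The first form requires a full prototype; the second additionally checks
   that every listed operand number is in range and names a pointer
   parameter, which is what the loop above verifies.  Example for
   documentation only; use1 and use2 are hypothetical decls.  */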
6076
6077/* Handle a "sentinel" attribute. */
6078
6079static tree
6080handle_sentinel_attribute (tree *node, tree name, tree args,
6081 int ARG_UNUSED (flags), bool *no_add_attrs)
6082{
a36cf284 6083 if (!prototype_p (*node))
27becfc8 6084 {
6085 warning (OPT_Wattributes,
52dd2567 6086 "%qs attribute requires prototypes with named arguments",
6087 IDENTIFIER_POINTER (name));
27becfc8 6088 *no_add_attrs = true;
6089 }
6090 else
6091 {
c33080b9 6092 if (!stdarg_p (*node))
27becfc8 6093 {
6094 warning (OPT_Wattributes,
52dd2567 6095 "%qs attribute only applies to variadic functions",
6096 IDENTIFIER_POINTER (name));
27becfc8 6097 *no_add_attrs = true;
6098 }
6099 }
6100
6101 if (args)
6102 {
6103 tree position = TREE_VALUE (args);
6104
6105 if (TREE_CODE (position) != INTEGER_CST)
6106 {
6107 warning (0, "requested position is not an integer constant");
6108 *no_add_attrs = true;
6109 }
6110 else
6111 {
6112 if (tree_int_cst_lt (position, integer_zero_node))
6113 {
6114 warning (0, "requested position is less than zero");
6115 *no_add_attrs = true;
6116 }
6117 }
6118 }
6119
6120 return NULL_TREE;
6121}
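
/* For instance, in C terms the handler above accepts

     extern void execl (const char *, ...) __attribute__ ((sentinel));
     extern void execle (const char *, ...) __attribute__ ((sentinel (1)));

   and warns for non-variadic functions as well as for a negative or
   non-constant position argument.  Illustrative example only.  */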
6122
6123/* Handle a "noreturn" attribute; arguments as in
6124 struct attribute_spec.handler. */
6125
6126static tree
6127handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6128 int ARG_UNUSED (flags), bool *no_add_attrs)
6129{
6130 tree type = TREE_TYPE (*node);
6131
6132 /* See FIXME comment in c_common_attribute_table. */
6133 if (TREE_CODE (*node) == FUNCTION_DECL)
6134 TREE_THIS_VOLATILE (*node) = 1;
6135 else if (TREE_CODE (type) == POINTER_TYPE
6136 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6137 TREE_TYPE (*node)
6138 = build_pointer_type
6139 (build_type_variant (TREE_TYPE (type),
6140 TYPE_READONLY (TREE_TYPE (type)), 1));
6141 else
6142 {
52dd2567 6143 warning (OPT_Wattributes, "%qs attribute ignored",
6144 IDENTIFIER_POINTER (name));
27becfc8 6145 *no_add_attrs = true;
6146 }
02e0316b 6147
6148 return NULL_TREE;
6149}
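
/* Both forms below are accepted by the handler above (illustrative only):

     extern void fatal_error_fn (const char *) __attribute__ ((noreturn));
     void (*handler) (int) __attribute__ ((noreturn));

   On a FUNCTION_DECL the volatile bit (TREE_THIS_VOLATILE) is set
   directly; on a pointer to function the pointed-to type is rebuilt as a
   volatile-qualified variant.  */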
6150
6151/* Handle a "noinline" attribute; arguments as in
6152 struct attribute_spec.handler. */
6153
6154static tree
6155handle_noinline_attribute (tree *node, tree name,
6156 tree ARG_UNUSED (args),
6157 int ARG_UNUSED (flags), bool *no_add_attrs)
6158{
6159 if (TREE_CODE (*node) == FUNCTION_DECL)
6160 {
6161 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6162 {
6163 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6164 "with attribute %qs", name, "always_inline");
6165 *no_add_attrs = true;
6166 }
6167 else
6168 DECL_UNINLINABLE (*node) = 1;
6169 }
6170 else
6171 {
6172 warning (OPT_Wattributes, "%qE attribute ignored", name);
6173 *no_add_attrs = true;
6174 }
6175
6176 return NULL_TREE;
6177}
6178
6179/* Handle a "noclone" attribute; arguments as in
6180 struct attribute_spec.handler. */
6181
6182static tree
6183handle_noclone_attribute (tree *node, tree name,
6184 tree ARG_UNUSED (args),
6185 int ARG_UNUSED (flags), bool *no_add_attrs)
6186{
6187 if (TREE_CODE (*node) != FUNCTION_DECL)
6188 {
6189 warning (OPT_Wattributes, "%qE attribute ignored", name);
6190 *no_add_attrs = true;
6191 }
27becfc8 6192
6193 return NULL_TREE;
6194}
6195
4a68fd3c 6196/* Handle a "leaf" attribute; arguments as in
6197 struct attribute_spec.handler. */
6198
6199static tree
9fac98bb 6200handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4a68fd3c 6201 int ARG_UNUSED (flags), bool *no_add_attrs)
6202{
6203 if (TREE_CODE (*node) != FUNCTION_DECL)
6204 {
6205 warning (OPT_Wattributes, "%qE attribute ignored", name);
6206 *no_add_attrs = true;
6207 }
6208 if (!TREE_PUBLIC (*node))
6209 {
c4540f0c 6210 warning (OPT_Wattributes, "%qE attribute has no effect", name);
4a68fd3c 6211 *no_add_attrs = true;
9fac98bb 6212 }
6213
6214 return NULL_TREE;
6215}
6216
6217/* Handle an "always_inline" attribute; arguments as in
6218 struct attribute_spec.handler. */
6219
6220static tree
6221handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6222 int ARG_UNUSED (flags), bool *no_add_attrs)
6223{
6224 if (TREE_CODE (*node) == FUNCTION_DECL)
6225 {
6226 /* Set the attribute and mark it for disregarding inline limits. */
6227 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6228 }
6229 else
6230 {
6231 warning (OPT_Wattributes, "%qE attribute ignored", name);
6232 *no_add_attrs = true;
4a68fd3c 6233 }
6234
6235 return NULL_TREE;
6236}
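
/* For example (illustrative only), applying "noinline" to a function that
   already carries "always_inline", as in

     extern int f (int) __attribute__ ((always_inline, noinline));

   draws the "ignored due to conflict" warning from
   handle_noinline_attribute, while either attribute on its own sets
   DECL_UNINLINABLE or DECL_DISREGARD_INLINE_LIMITS respectively.  */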
6237
27becfc8 6238/* Handle a "malloc" attribute; arguments as in
6239 struct attribute_spec.handler. */
6240
6241static tree
6242handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6243 int ARG_UNUSED (flags), bool *no_add_attrs)
6244{
6245 if (TREE_CODE (*node) == FUNCTION_DECL
6246 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6247 DECL_IS_MALLOC (*node) = 1;
6248 else
6249 {
52dd2567 6250 warning (OPT_Wattributes, "%qs attribute ignored",
6251 IDENTIFIER_POINTER (name));
27becfc8 6252 *no_add_attrs = true;
6253 }
6254
6255 return NULL_TREE;
6256}
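
/* E.g. (illustrative only, my_pool_alloc is a hypothetical decl):

     extern void *my_pool_alloc (size_t) __attribute__ ((malloc));

   sets DECL_IS_MALLOC on the decl, whereas the attribute is rejected with
   a warning on functions that do not return a pointer.  */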
6257
6258/* Fake handler for attributes we don't properly support. */
6259
6260tree
6261fake_attribute_handler (tree * ARG_UNUSED (node),
6262 tree ARG_UNUSED (name),
6263 tree ARG_UNUSED (args),
6264 int ARG_UNUSED (flags),
6265 bool * ARG_UNUSED (no_add_attrs))
6266{
6267 return NULL_TREE;
6268}
6269
6270/* Handle a "type_generic" attribute. */
6271
6272static tree
6273handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6274 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6275 bool * ARG_UNUSED (no_add_attrs))
6276{
27becfc8 6277 /* Ensure we have a function type. */
6278 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
153edb51 6279
27becfc8 6280 /* Ensure we have a variadic function. */
c33080b9 6281 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
27becfc8 6282
6283 return NULL_TREE;
6284}
6285
5595eac6 6286/* Handle a "vector_size" attribute; arguments as in
6287 struct attribute_spec.handler. */
6288
6289static tree
6290handle_vector_size_attribute (tree *node, tree name, tree args,
fc07fe6f 6291 int ARG_UNUSED (flags), bool *no_add_attrs)
5595eac6 6292{
fc07fe6f 6293 tree type = *node;
6294 tree vector_type;
5595eac6 6295
6296 *no_add_attrs = true;
6297
5595eac6 6298 /* We need to provide for vector pointers, vector arrays, and
6299 functions returning vectors. For example:
6300
6301 __attribute__((vector_size(16))) short *foo;
6302
6303 In this case, the mode is SI, but the type being modified is
6304 HI, so we need to look further. */
5595eac6 6305 while (POINTER_TYPE_P (type)
6306 || TREE_CODE (type) == FUNCTION_TYPE
e00e17ad 6307 || TREE_CODE (type) == ARRAY_TYPE)
5595eac6 6308 type = TREE_TYPE (type);
6309
fc07fe6f 6310 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6311 if (!vector_type)
6312 return NULL_TREE;
5595eac6 6313
6314 /* Build back pointers if needed. */
fc07fe6f 6315 *node = reconstruct_complex_type (*node, vector_type);
5595eac6 6316
6317 return NULL_TREE;
6318}
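
/* Continuing the example in the comment above: for

     __attribute__((vector_size (16))) short *foo;

   the inner type "short" is turned into a vector of 8 shorts by
   build_vector_type_for_size, and reconstruct_complex_type then rebuilds
   the pointer, so foo ends up pointing to that 16-byte vector type.
   Illustrative only.  */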
6319
52dd2567 6320/* Handle a "vector_type" attribute; arguments as in
6321 struct attribute_spec.handler. */
6322
6323static tree
6324handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
fc07fe6f 6325 int ARG_UNUSED (flags), bool *no_add_attrs)
52dd2567 6326{
fc07fe6f 6327 tree type = *node;
6328 tree vector_type;
52dd2567 6329
6330 *no_add_attrs = true;
6331
fc07fe6f 6332 if (TREE_CODE (type) != ARRAY_TYPE)
52dd2567 6333 {
6334 error ("attribute %qs applies to array types only",
6335 IDENTIFIER_POINTER (name));
6336 return NULL_TREE;
6337 }
6338
fc07fe6f 6339 vector_type = build_vector_type_for_array (type, name);
6340 if (!vector_type)
52dd2567 6341 return NULL_TREE;
6342
fc07fe6f 6343 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6344 *node = vector_type;
52dd2567 6345
6346 return NULL_TREE;
6347}
6348
27becfc8 6349/* ----------------------------------------------------------------------- *
6350 * BUILTIN FUNCTIONS *
6351 * ----------------------------------------------------------------------- */
6352
6353/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6354 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6355 if nonansi_p and flag_no_nonansi_builtin. */
6356
6357static void
6358def_builtin_1 (enum built_in_function fncode,
6359 const char *name,
6360 enum built_in_class fnclass,
6361 tree fntype, tree libtype,
6362 bool both_p, bool fallback_p,
6363 bool nonansi_p ATTRIBUTE_UNUSED,
6364 tree fnattrs, bool implicit_p)
6365{
6366 tree decl;
6367 const char *libname;
6368
6369  /* Preserve an already installed decl.  It most likely was set up in advance
6370 (e.g. as part of the internal builtins) for specific reasons. */
ea780bd9 6371 if (builtin_decl_explicit (fncode))
27becfc8 6372 return;
6373
6374 gcc_assert ((!both_p && !fallback_p)
6375 || !strncmp (name, "__builtin_",
6376 strlen ("__builtin_")));
6377
6378 libname = name + strlen ("__builtin_");
6379 decl = add_builtin_function (name, fntype, fncode, fnclass,
6380 (fallback_p ? libname : NULL),
6381 fnattrs);
6382 if (both_p)
6383 /* ??? This is normally further controlled by command-line options
6384 like -fno-builtin, but we don't have them for Ada. */
6385 add_builtin_function (libname, libtype, fncode, fnclass,
6386 NULL, fnattrs);
6387
b9a16870 6388 set_builtin_decl (fncode, decl, implicit_p);
27becfc8 6389}
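
/* Illustrative only: for memcpy, with both_p and fallback_p set, the call
   made by install_builtin_functions below is morally equivalent to

     decl = add_builtin_function ("__builtin_memcpy", fntype,
                                  BUILT_IN_MEMCPY, BUILT_IN_NORMAL,
                                  "memcpy", fnattrs);
     add_builtin_function ("memcpy", libtype, BUILT_IN_MEMCPY,
                           BUILT_IN_NORMAL, NULL, fnattrs);
     set_builtin_decl (BUILT_IN_MEMCPY, decl, true);

   so that both the __builtin_ and the plain names resolve to the same
   builtin function code.  */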
6390
6391static int flag_isoc94 = 0;
6392static int flag_isoc99 = 0;
060fc206 6393static int flag_isoc11 = 0;
27becfc8 6394
6395/* Install what the common builtins.def offers. */
6396
6397static void
6398install_builtin_functions (void)
6399{
6400#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6401 NONANSI_P, ATTRS, IMPLICIT, COND) \
6402 if (NAME && COND) \
6403 def_builtin_1 (ENUM, NAME, CLASS, \
6404 builtin_types[(int) TYPE], \
6405 builtin_types[(int) LIBTYPE], \
6406 BOTH_P, FALLBACK_P, NONANSI_P, \
6407 built_in_attributes[(int) ATTRS], IMPLICIT);
6408#include "builtins.def"
27becfc8 6409}
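
/* A typical builtins.def entry feeding the DEF_BUILTIN macro above looks
   like (attribute argument shown as a placeholder, for illustration only):

     DEF_LIB_BUILTIN (BUILT_IN_MEMCPY, "memcpy",
                      BT_FN_PTR_PTR_CONST_PTR_SIZE, <attrs>)

   where DEF_LIB_BUILTIN is itself defined in terms of DEF_BUILTIN at the
   top of builtins.def.  The flag_isoc* variables above exist only because
   some of those wrapper macros test them in their COND argument.  */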
6410
6411/* ----------------------------------------------------------------------- *
6412 * BUILTIN FUNCTIONS *
6413 * ----------------------------------------------------------------------- */
6414
6415/* Install the builtin functions we might need. */
6416
6417void
6418gnat_install_builtins (void)
6419{
6420 install_builtin_elementary_types ();
6421 install_builtin_function_types ();
6422 install_builtin_attributes ();
6423
6424 /* Install builtins used by generic middle-end pieces first. Some of these
6425 know about internal specificities and control attributes accordingly, for
6426 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6427 the generic definition from builtins.def. */
471eff36 6428 build_common_builtin_nodes ();
27becfc8 6429
6430 /* Now, install the target specific builtins, such as the AltiVec family on
6431 ppc, and the common set as exposed by builtins.def. */
6432 targetm.init_builtins ();
6433 install_builtin_functions ();
6434}
6435
6436#include "gt-ada-utils.h"
6437#include "gtype-ada.h"