/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2013, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3, false, true, true, fake_attribute_handler, false },
  { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },

  { NULL, 0, 0, false, false, false, NULL, false }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
             param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
                                         pad_type_hash_eq, 0);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  htab_delete (pad_type_hash_table);
  pad_type_hash_table = NULL;

  /* Invalidate the global renaming pointers.  */
  invalidate_global_renaming_pointers ();
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc_gnat_binding_level ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      if (!global_context)
        global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
    }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
         For really nested functions mark them initially as needing
         a static chain for uses of that flag before unnesting;
         lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
        DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
        {
          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            vec_safe_push (builtin_decls, decl);
        }
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        {
          /* Array and pointer types aren't "tagged" types so we force the
             type to be associated with its typedef in the DWARF back-end,
             in order to make sure that the latter is always preserved.  */
          if (!DECL_ARTIFICIAL (decl)
              && (TREE_CODE (t) == ARRAY_TYPE
                  || TREE_CODE (t) == POINTER_TYPE))
            {
              tree tt = build_distinct_type_copy (t);
              if (TREE_CODE (t) == POINTER_TYPE)
                TYPE_NEXT_PTR_TO (t) = tt;
              TYPE_NAME (tt) = DECL_NAME (decl);
              gnat_set_type_context (tt, DECL_CONTEXT (decl));
              TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
              DECL_ORIGINAL_TYPE (decl) = tt;
            }
        }
      else if (TYPE_IS_FAT_POINTER_P (t))
        {
          /* We need a variant for the placeholder machinery to work.  */
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          gnat_set_type_context (tt, DECL_CONTEXT (decl));
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          DECL_ARTIFICIAL (decl) = 0;
          t = NULL_TREE;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
         for the type qualifiers machinery to work properly.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              gnat_set_type_context (t, DECL_CONTEXT (decl));
            }
    }
}

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                      <--------- pos ---------->
                  {  +------------+-------------+-----------------+
        record  =>{  |############|     ...     | field (type)    |
                  {  +------------+-------------+-----------------+
                     |<-- room -->|<- voffset ->|<---- size ----->|
                      o            o
                      |            |
                      record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_NAME (type);

  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));
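  /* Illustrative example (hypothetical values, not tied to any particular
     target): with ALIGN = 32 bits, i.e. a 4-byte boundary, and a
     vblock_addr of 6, the expression above computes (-6) & 3 = 2, so
     voffset = 2 and the field starts at address 8, the next multiple of 4
     past vblock_addr.  */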

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
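      /* Illustrative example (hypothetical values): for a 13-bit record,
         ceil_pow2 yields an alignment of 16 and the rounding above gives
         (13 + 15) & -16 = 16, i.e. the size is bumped to 16 bits so that
         an integral mode (HImode on typical targets) can be used.  */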
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
        return type;

      align = new_size & -new_size;
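      /* NEW_SIZE & -NEW_SIZE isolates the lowest set bit, i.e. the largest
         power of two dividing NEW_SIZE; e.g. a 24-bit size would give 8 and
         thus byte alignment here (example values only).  */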
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (old_field);

      new_field
        = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
                             new_size, bit_position (old_field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                          DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
        = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type,
                             convert (TREE_TYPE (new_type),
                                      TYPE_MIN_VALUE (type)));
      SET_TYPE_RM_MAX_VALUE (new_type,
                             convert (TREE_TYPE (new_type),
                                      TYPE_MAX_VALUE (type)));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
          if (!targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

/* See if the data pointed to by the hash table slot is marked.  */

static int
pad_type_hash_marked_p (const void *p)
{
  const_tree const type = ((const struct pad_type_hash *) p)->type;

  return ggc_marked_p (type);
}

/* Return the cached hash value.  */

static hashval_t
pad_type_hash_hash (const void *p)
{
  return ((const struct pad_type_hash *) p)->hash;
}

/* Return 1 iff the padded types are equivalent.  */

static int
pad_type_hash_eq (const void *p1, const void *p2)
{
  const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
  const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment and RM size.  Taking the mode
     into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and TYPE are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size,
                                         MAX (align, TYPE_ALIGN (type))),
                               round_up (TYPE_SIZE (type),
                                         MAX (align, TYPE_ALIGN (type))),
                               0))
          && (align == 0 || align == TYPE_ALIGN (type)))
        return type;

      if (!size)
        size = TYPE_SIZE (type);
      if (align == 0)
        align = TYPE_ALIGN (type);

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == TYPE_ALIGN (type))
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
        {
          hashval_t hashcode;
          struct pad_type_hash in, *h;
          void **loc;

          hashcode = iterative_hash_object (TYPE_HASH (type), 0);
          hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
          hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
          hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);

          in.hash = hashcode;
          in.type = record;
          h = (struct pad_type_hash *)
              htab_find_with_hash (pad_type_hash_table, &in, hashcode);
          if (h)
            {
              record = h->type;
              goto built;
            }

          h = ggc_alloc_pad_type_hash ();
          h->hash = hashcode;
          h->type = record;
          loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
                                          INSERT);
          *loc = (void *)h;
        }
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_NAME (record);
      tree orig_name = TYPE_NAME (type);

      if (TREE_CODE (name) == TYPE_DECL)
        name = DECL_NAME (name);

      if (TREE_CODE (orig_name) == TYPE_DECL)
        orig_name = DECL_NAME (orig_name);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
                          create_field_decl (orig_name,
                                             build_reference_type (type),
                                             marker, NULL_TREE, NULL_TREE,
                                             0, 0),
                          0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
        TYPE_SIZE_UNIT (marker)
          = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                             TYPE_SIZE_UNIT (record), false, false, false,
                             false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
           && TREE_CODE (orig_size) == INTEGER_CST
           && (TREE_OVERFLOW (size)
               || TREE_OVERFLOW (orig_size)
               || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      if (Is_Packed_Array_Type (gnat_entity))
        gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
           || Ekind (gnat_entity) == E_Discriminant)
          && Present (Component_Clause (gnat_entity)))
        gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
        gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
         if we have an entity created by expansion, the message will be
         generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
        {
          if (Present (gnat_error_node))
            post_error_ne_tree ("{^ }bits of & unused?",
                                gnat_error_node, gnat_entity,
                                size_diffop (size, orig_size));
          else if (is_component_type)
            post_error_ne_tree ("component of& padded{ by ^ bits}?",
                                gnat_entity, gnat_entity,
                                size_diffop (size, orig_size));
        }
    }

  return record;
}

/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
         && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
             || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
         aliasing settings because this can break the aliasing relationship
         between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
        gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
                      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
                      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
                         != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
        alias_set_type old_set = get_alias_set (gnu_old_type);
        alias_set_type new_set = get_alias_set (gnu_new_type);

        /* Do nothing if the alias sets conflict.  This ensures that we
           never call record_alias_subset several times for the same pair
           or at all for alias set 0.  */
        if (!alias_sets_conflict_p (old_set, new_set))
          {
            if (op == ALIAS_SET_SUBSET)
              record_alias_subset (old_set, new_set);
            else
              record_alias_subset (new_set, old_set);
          }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}

/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
                    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
         to pack the record type or have a representation clause.  The other
         possible layout (Microsoft C compiler), if available, would prevent
         efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
        decl_attributes (&record_type,
                         tree_cons (get_identifier ("gcc_struct"),
                                    NULL_TREE, NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
          && !TYPE_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);
      else
        this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        {
          unsigned int align = TYPE_ALIGN (type);

          /* In the general case, type alignment is required.  */
          if (value_factor_p (pos, align))
            {
              /* The enclosing record type must be sufficiently aligned.
                 Otherwise, if no alignment was specified for it and it
                 has been laid out already, bump its alignment to the
                 desired one if this is compatible with its size.  */
              if (TYPE_ALIGN (record_type) >= align)
                {
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
              else if (!had_align
                       && rep_level == 0
                       && value_factor_p (TYPE_SIZE (record_type), align))
                {
                  TYPE_ALIGN (record_type) = align;
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
            }

1533	  /* In the non-strict alignment case, only byte alignment is required.  */
1534 if (!STRICT_ALIGNMENT
1535 && DECL_BIT_FIELD (field)
1536 && value_factor_p (pos, BITS_PER_UNIT))
1537 DECL_BIT_FIELD (field) = 0;
1538 }
1539
c1abd261
EB
1540 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1541 field is technically not addressable. Except that it can actually
1542 be addressed if it is BLKmode and happens to be properly aligned. */
1543 if (DECL_BIT_FIELD (field)
1544 && !(DECL_MODE (field) == BLKmode
1545 && value_factor_p (pos, BITS_PER_UNIT)))
1546 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1547
1548 /* A type must be as aligned as its most aligned field that is not
1549 a bit-field. But this is already enforced by layout_type. */
1550 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1551 TYPE_ALIGN (record_type)
1552 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1553
1554 switch (code)
1555 {
1556 case UNION_TYPE:
1557 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1558 size = size_binop (MAX_EXPR, size, this_size);
1559 break;
1560
1561 case QUAL_UNION_TYPE:
1562 ada_size
1563 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1564 this_ada_size, ada_size);
1565 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1566 this_size, size);
1567 break;
1568
1569 case RECORD_TYPE:
1570 /* Since we know here that all fields are sorted in order of
1571 increasing bit position, the size of the record is one
1572 higher than the ending bit of the last field processed
1573 unless we have a rep clause, since in that case we might
1574 have a field outside a QUAL_UNION_TYPE that has a higher ending
1575 position. So use a MAX in that case. Also, if this field is a
1576 QUAL_UNION_TYPE, we need to take into account the previous size in
1577 the case of empty variants. */
1578 ada_size
1579 = merge_sizes (ada_size, pos, this_ada_size,
1580 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1581 size
1582 = merge_sizes (size, pos, this_size,
1583 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1584 break;
1585
1586 default:
1587 gcc_unreachable ();
1588 }
1589 }
1590
1591 if (code == QUAL_UNION_TYPE)
032d1b71 1592 nreverse (field_list);
a1ab4c31
AC
1593
1594 if (rep_level < 2)
1595 {
1596 /* If this is a padding record, we never want to make the size smaller
1597 than what was specified in it, if any. */
315cff15 1598 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1599 size = TYPE_SIZE (record_type);
1600
1601 /* Now set any of the values we've just computed that apply. */
315cff15 1602 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1603 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1604 SET_TYPE_ADA_SIZE (record_type, ada_size);
1605
1606 if (rep_level > 0)
1607 {
1608 tree size_unit = had_size_unit
1609 ? TYPE_SIZE_UNIT (record_type)
1610 : convert (sizetype,
1611 size_binop (CEIL_DIV_EXPR, size,
1612 bitsize_unit_node));
1613 unsigned int align = TYPE_ALIGN (record_type);
1614
1615 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1616 TYPE_SIZE_UNIT (record_type)
1617 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1618
1619 compute_record_mode (record_type);
1620 }
1621 }
1622
032d1b71 1623 if (debug_info_p)
a1ab4c31
AC
1624 rest_of_record_type_compilation (record_type);
1625}
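
/* A minimal sketch of the expected call sequence for a record without
   representation clause (REP_LEVEL 0); the F1/F2 names, the use of
   integer_type_node and the example_ prefix are illustrative only.  */

static tree
example_build_plain_record (void)
{
  tree record = make_node (RECORD_TYPE);
  tree f1 = create_field_decl (get_identifier ("F1"), integer_type_node,
			       record, NULL_TREE, NULL_TREE, 0, 0);
  tree f2 = create_field_decl (get_identifier ("F2"), integer_type_node,
			       record, NULL_TREE, NULL_TREE, 0, 0);

  /* Chain the fields in declaration order; with REP_LEVEL 0 the record is
     laid out entirely by finish_record_type and, with DEBUG_INFO_P true,
     debug information is emitted for it.  */
  DECL_CHAIN (f1) = f2;
  finish_record_type (record, f1, 0, true);

  return record;
}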
1626
a5695aa2
EB
1627/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1628
1629void
1630add_parallel_type (tree type, tree parallel_type)
1631{
1632 tree decl = TYPE_STUB_DECL (type);
1633
1634 while (DECL_PARALLEL_TYPE (decl))
1635 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1636
1637 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1638}
1639
1640/* Return true if TYPE has a parallel type. */
1641
1642static bool
1643has_parallel_type (tree type)
1644{
1645 tree decl = TYPE_STUB_DECL (type);
1646
1647 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1648}
1649
032d1b71
EB
1650/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1651 associated with it. It need not be invoked directly in most cases since
1652 finish_record_type takes care of doing so, but this can be necessary if
1653 a parallel type is to be attached to the record type. */
a1ab4c31
AC
1654
1655void
1656rest_of_record_type_compilation (tree record_type)
1657{
a1ab4c31 1658 bool var_size = false;
fb88e1dd 1659 tree field;
a1ab4c31 1660
fb88e1dd
EB
1661 /* If this is a padded type, the bulk of the debug info has already been
1662 generated for the field's type. */
1663 if (TYPE_IS_PADDING_P (record_type))
1664 return;
1665
a5695aa2
EB
1666 /* If the type already has a parallel type (XVS type), then we're done. */
1667 if (has_parallel_type (record_type))
1668 return;
1669
fb88e1dd 1670 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1671 {
1672 /* We need to make an XVE/XVU record if any field has variable size,
1673 whether or not the record does. For example, if we have a union,
1674 it may be that all fields, rounded up to the alignment, have the
1675 same size, in which case we'll use that size. But the debug
1676 output routines (except Dwarf2) won't be able to output the fields,
1677 so we need to make the special record. */
1678 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1679 /* If a field has a non-constant qualifier, the record will have
1680 variable size too. */
fb88e1dd 1681 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1682 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1683 {
1684 var_size = true;
1685 break;
1686 }
1687 }
1688
fb88e1dd
EB
1689 /* If this record type is of variable size, make a parallel record type that
1690 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1691 if (var_size)
a1ab4c31
AC
1692 {
1693 tree new_record_type
1694 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1695 ? UNION_TYPE : TREE_CODE (record_type));
0fb2335d 1696 tree orig_name = TYPE_NAME (record_type), new_name;
a1ab4c31 1697 tree last_pos = bitsize_zero_node;
0fb2335d 1698 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1699
0fb2335d
EB
1700 if (TREE_CODE (orig_name) == TYPE_DECL)
1701 orig_name = DECL_NAME (orig_name);
1702
1703 new_name
1704 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1705 ? "XVU" : "XVE");
1706 TYPE_NAME (new_record_type) = new_name;
a1ab4c31
AC
1707 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1708 TYPE_STUB_DECL (new_record_type)
0fb2335d 1709 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1710 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1711 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1712 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1713 TYPE_SIZE_UNIT (new_record_type)
1714 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1715
5c475ba9
EB
1716 /* Now scan all the fields, replacing each field with a new field
1717 corresponding to the new encoding. */
a1ab4c31 1718 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1719 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
1720 {
1721 tree field_type = TREE_TYPE (old_field);
1722 tree field_name = DECL_NAME (old_field);
a1ab4c31 1723 tree curpos = bit_position (old_field);
5c475ba9 1724 tree pos, new_field;
a1ab4c31
AC
1725 bool var = false;
1726 unsigned int align = 0;
a1ab4c31 1727
5c475ba9
EB
1728 /* We're going to do some pattern matching below so remove as many
1729 conversions as possible. */
1730 curpos = remove_conversions (curpos, true);
a1ab4c31 1731
5c475ba9 1732 /* See how the position was modified from the last position.
a1ab4c31 1733
5c475ba9
EB
1734 There are two basic cases we support: a value was added
1735 to the last position or the last position was rounded to
1736	     a boundary and then something was added.  Check for the
1737 first case first. If not, see if there is any evidence
1738 of rounding. If so, round the last position and retry.
a1ab4c31 1739
5c475ba9 1740 If this is a union, the position can be taken as zero. */
a1ab4c31 1741 if (TREE_CODE (new_record_type) == UNION_TYPE)
5c475ba9 1742 pos = bitsize_zero_node;
a1ab4c31
AC
1743 else
1744 pos = compute_related_constant (curpos, last_pos);
1745
5c475ba9
EB
1746 if (!pos
1747 && TREE_CODE (curpos) == MULT_EXPR
cc269bb6 1748 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
a1ab4c31
AC
1749 {
1750 tree offset = TREE_OPERAND (curpos, 0);
ae7e9ddd 1751 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
5c475ba9
EB
1752 align = scale_by_factor_of (offset, align);
1753 last_pos = round_up (last_pos, align);
1754 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1755 }
5c475ba9
EB
1756 else if (!pos
1757 && TREE_CODE (curpos) == PLUS_EXPR
cc269bb6 1758 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
a1ab4c31 1759 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
5a36c51b
RS
1760 && tree_fits_uhwi_p
1761 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
a1ab4c31 1762 {
5c475ba9
EB
1763 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1764 unsigned HOST_WIDE_INT addend
ae7e9ddd 1765 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
a1ab4c31 1766 align
ae7e9ddd 1767 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
5c475ba9
EB
1768 align = scale_by_factor_of (offset, align);
1769 align = MIN (align, addend & -addend);
1770 last_pos = round_up (last_pos, align);
1771 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1772 }
5c475ba9 1773 else if (potential_alignment_gap (prev_old_field, old_field, pos))
a1ab4c31
AC
1774 {
1775 align = TYPE_ALIGN (field_type);
5c475ba9
EB
1776 last_pos = round_up (last_pos, align);
1777 pos = compute_related_constant (curpos, last_pos);
a1ab4c31
AC
1778 }
1779
1780 /* If we can't compute a position, set it to zero.
1781
5c475ba9
EB
1782 ??? We really should abort here, but it's too much work
1783 to get this correct for all cases. */
a1ab4c31
AC
1784 if (!pos)
1785 pos = bitsize_zero_node;
1786
1787 /* See if this type is variable-sized and make a pointer type
1788 and indicate the indirection if so. Beware that the debug
1789 back-end may adjust the position computed above according
1790 to the alignment of the field type, i.e. the pointer type
1791 in this case, if we don't preventively counter that. */
1792 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1793 {
1794 field_type = build_pointer_type (field_type);
1795 if (align != 0 && TYPE_ALIGN (field_type) > align)
1796 {
1797 field_type = copy_node (field_type);
1798 TYPE_ALIGN (field_type) = align;
1799 }
1800 var = true;
1801 }
1802
1803 /* Make a new field name, if necessary. */
1804 if (var || align != 0)
1805 {
1806 char suffix[16];
1807
1808 if (align != 0)
1809 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1810 align / BITS_PER_UNIT);
1811 else
1812 strcpy (suffix, "XVL");
1813
0fb2335d 1814 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
1815 }
1816
da01bfee
EB
1817 new_field
1818 = create_field_decl (field_name, field_type, new_record_type,
1819 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 1820 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
1821 TYPE_FIELDS (new_record_type) = new_field;
1822
1823 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1824 zero. The only time it's not the last field of the record
1825 is when there are other components at fixed positions after
1826 it (meaning there was a rep clause for every field) and we
1827 want to be able to encode them. */
1828 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1829 (TREE_CODE (TREE_TYPE (old_field))
1830 == QUAL_UNION_TYPE)
1831 ? bitsize_zero_node
1832 : DECL_SIZE (old_field));
1833 prev_old_field = old_field;
1834 }
1835
fb88e1dd 1836 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 1837
a5695aa2 1838 add_parallel_type (record_type, new_record_type);
a1ab4c31 1839 }
a1ab4c31
AC
1840}
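
/* As a concrete illustration of the encoding built above (assuming the
   usual "___" separator produced by concat_name): for a variable-sized
   record REC whose field DATA has a non-constant size, a parallel type
   named REC___XVE is created with a single field DATA___XVL of pointer
   type, telling the debugger that the field is reached indirectly; if an
   alignment factor of, say, 8 bytes was inferred for its position, the
   suffix becomes XVL8 instead.  */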
1841
a1ab4c31 1842/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
1843 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1844 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1845 replace a value of zero with the old size. If HAS_REP is true, we take the
1846 MAX of the end position of this field with LAST_SIZE. In all other cases,
1847 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
1848
1849static tree
1850merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1851 bool has_rep)
1852{
1853 tree type = TREE_TYPE (last_size);
c6bd4220 1854 tree new_size;
a1ab4c31
AC
1855
1856 if (!special || TREE_CODE (size) != COND_EXPR)
1857 {
c6bd4220 1858 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 1859 if (has_rep)
c6bd4220 1860 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
1861 }
1862
1863 else
c6bd4220
EB
1864 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1865 integer_zerop (TREE_OPERAND (size, 1))
1866 ? last_size : merge_sizes (last_size, first_bit,
1867 TREE_OPERAND (size, 1),
1868 1, has_rep),
1869 integer_zerop (TREE_OPERAND (size, 2))
1870 ? last_size : merge_sizes (last_size, first_bit,
1871 TREE_OPERAND (size, 2),
1872 1, has_rep));
a1ab4c31
AC
1873
1874 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1875 when fed through substitute_in_expr) into thinking that a constant
1876 size is not constant. */
c6bd4220
EB
1877 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1878 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 1879
c6bd4220 1880 return new_size;
a1ab4c31
AC
1881}
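
/* A small worked example of the sizing arithmetic above, in bits: without
   rep clause (HAS_REP false), a field at FIRST_BIT 32 with SIZE 8 yields
   32 + 8 = 40 regardless of LAST_SIZE; with HAS_REP true and LAST_SIZE 64,
   the result is MAX (64, 40) = 64 instead, since a rep clause may have
   placed an earlier field beyond this one.  */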
1882
1883/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1884 related by the addition of a constant. Return that constant if so. */
1885
1886static tree
1887compute_related_constant (tree op0, tree op1)
1888{
1889 tree op0_var, op1_var;
1890 tree op0_con = split_plus (op0, &op0_var);
1891 tree op1_con = split_plus (op1, &op1_var);
1892 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1893
1894 if (operand_equal_p (op0_var, op1_var, 0))
1895 return result;
1896 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1897 return result;
1898 else
1899 return 0;
1900}
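
/* For instance, if OP0 is N + 48 and OP1 is N + 16, the variable parts
   match and 32 is returned; if they differ, as with OP0 = N and OP1 = M,
   the function returns 0 to signal that no relating constant exists.  */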
1901
1902/* Utility function of above to split a tree OP which may be a sum, into a
1903 constant part, which is returned, and a variable part, which is stored
1904 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1905 bitsizetype. */
1906
1907static tree
1908split_plus (tree in, tree *pvar)
1909{
722356ce
EB
1910 /* Strip conversions in order to ease the tree traversal and maximize the
1911 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
1912 to always return and set *pvar to bitsizetype trees, but it's worth
1913 the effort. */
722356ce 1914 in = remove_conversions (in, false);
a1ab4c31
AC
1915
1916 *pvar = convert (bitsizetype, in);
1917
1918 if (TREE_CODE (in) == INTEGER_CST)
1919 {
1920 *pvar = bitsize_zero_node;
1921 return convert (bitsizetype, in);
1922 }
1923 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1924 {
1925 tree lhs_var, rhs_var;
1926 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1927 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1928
1929 if (lhs_var == TREE_OPERAND (in, 0)
1930 && rhs_var == TREE_OPERAND (in, 1))
1931 return bitsize_zero_node;
1932
1933 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1934 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1935 }
1936 else
1937 return bitsize_zero_node;
1938}
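
/* For instance, splitting the bitsizetype expression X + 24 stores X in
   *PVAR and returns 24, while splitting the lone constant 24 stores
   bitsize_zero_node in *PVAR and returns 24 itself.  */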
1939\f
d47d0a8d
EB
1940/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1941 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1942 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1943 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1944 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1945 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1946 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1947 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1948 invisible reference. */
a1ab4c31
AC
1949
1950tree
1951create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
1952 bool return_unconstrained_p, bool return_by_direct_ref_p,
1953 bool return_by_invisi_ref_p)
a1ab4c31 1954{
e5b00edf
NF
1955 /* A list of the data type nodes of the subprogram formal parameters.
1956 This list is generated by traversing the input list of PARM_DECL
1957 nodes. */
9771b263 1958 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 1959 tree t, type;
a1ab4c31 1960
910ad8de 1961 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 1962 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 1963
e5b00edf 1964 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 1965
d47d0a8d
EB
1966 /* TYPE may have been shared since GCC hashes types. If it has a different
1967 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
1968 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1969 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
1970 {
1971 type = copy_type (type);
1972 TYPE_CI_CO_LIST (type) = cico_list;
1973 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1974 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1975 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1976 }
a1ab4c31 1977
a1ab4c31
AC
1978 return type;
1979}
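
/* A minimal sketch of building the type of a function taking one Integer
   parameter and returning Integer; the X parameter name and the example_
   prefix are illustrative, and no copy-in/copy-out list is used.  */

static tree
example_build_function_type (void)
{
  tree param
    = create_param_decl (get_identifier ("X"), integer_type_node, true);

  /* Constrained result, returned neither by direct nor by invisible
     reference, and no CICO list.  */
  return create_subprog_type (integer_type_node, param, NULL_TREE,
			      false, false, false);
}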
1980\f
1981/* Return a copy of TYPE but safe to modify in any way. */
1982
1983tree
1984copy_type (tree type)
1985{
c6bd4220 1986 tree new_type = copy_node (type);
a1ab4c31 1987
90dcfecb
EB
1988 /* Unshare the language-specific data. */
1989 if (TYPE_LANG_SPECIFIC (type))
1990 {
1991 TYPE_LANG_SPECIFIC (new_type) = NULL;
1992 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1993 }
1994
1995 /* And the contents of the language-specific slot if needed. */
1996 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1997 && TYPE_RM_VALUES (type))
1998 {
1999 TYPE_RM_VALUES (new_type) = NULL_TREE;
2000 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2001 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2002 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2003 }
2004
a1ab4c31
AC
2005 /* copy_node clears this field instead of copying it, because it is
2006 aliased with TREE_CHAIN. */
c6bd4220 2007 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2008
c6bd4220
EB
2009 TYPE_POINTER_TO (new_type) = 0;
2010 TYPE_REFERENCE_TO (new_type) = 0;
2011 TYPE_MAIN_VARIANT (new_type) = new_type;
2012 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 2013
c6bd4220 2014 return new_type;
a1ab4c31
AC
2015}
2016\f
c1abd261
EB
2017/* Return a subtype of sizetype with range MIN to MAX and whose
2018 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2019 of the associated TYPE_DECL. */
a1ab4c31
AC
2020
2021tree
2022create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2023{
2024 /* First build a type for the desired range. */
523e82a7 2025 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2026
523e82a7 2027 /* Then set the index type. */
a1ab4c31 2028 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2029 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2030
a1ab4c31
AC
2031 return type;
2032}
84fb43a1
EB
2033
2034/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2035 sizetype is used. */
2036
2037tree
2038create_range_type (tree type, tree min, tree max)
2039{
2040 tree range_type;
2041
2042 if (type == NULL_TREE)
2043 type = sizetype;
2044
2045 /* First build a type with the base range. */
523e82a7
EB
2046 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2047 TYPE_MAX_VALUE (type));
84fb43a1
EB
2048
2049 /* Then set the actual range. */
523e82a7
EB
2050 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2051 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
84fb43a1
EB
2052
2053 return range_type;
2054}
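
/* For instance, the bounds of a 1 .. 10 index can be described with

     tree t = create_range_type (NULL_TREE, size_int (1), size_int (10));

   which keeps the full base range of sizetype for intermediate computations
   while recording 1 and 10 as the RM bounds of the subtype.  */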
a1ab4c31 2055\f
10069d53
EB
2056/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2057 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2058 its data type. */
2059
2060tree
2061create_type_stub_decl (tree type_name, tree type)
2062{
2063 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2064 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2065 emitted in DWARF. */
c172df28
AH
2066 tree type_decl = build_decl (input_location,
2067 TYPE_DECL, type_name, type);
10069d53 2068 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2069 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2070 return type_decl;
2071}
2072
2073/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2074 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2075 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2076 true if we need to write debug information about this type. GNAT_NODE
2077 is used for the position of the decl. */
a1ab4c31
AC
2078
2079tree
74746d49
EB
2080create_type_decl (tree type_name, tree type, bool artificial_p,
2081 bool debug_info_p, Node_Id gnat_node)
a1ab4c31 2082{
a1ab4c31 2083 enum tree_code code = TREE_CODE (type);
10069d53
EB
2084 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2085 tree type_decl;
a1ab4c31 2086
10069d53
EB
2087 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2088 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2089
10069d53
EB
2090 /* If the type hasn't been named yet, we're naming it; preserve an existing
2091 TYPE_STUB_DECL that has been attached to it for some purpose. */
2092 if (!named && TYPE_STUB_DECL (type))
2093 {
2094 type_decl = TYPE_STUB_DECL (type);
2095 DECL_NAME (type_decl) = type_name;
2096 }
2097 else
74746d49 2098 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
a1ab4c31 2099
10069d53 2100 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2101 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2102
2103 /* Add this decl to the current binding level. */
10069d53 2104 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2105
10069d53
EB
2106 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2107 This causes the name to be also viewed as a "tag" by the debug
2108 back-end, with the advantage that no DW_TAG_typedef is emitted
2109 for artificial "tagged" types in DWARF. */
2110 if (!named)
2111 TYPE_STUB_DECL (type) = type_decl;
2112
50741117
EB
2113 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2114 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2115 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2116 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2117
2118 return type_decl;
2119}
10069d53 2120\f
a1ab4c31
AC
2121/* Return a VAR_DECL or CONST_DECL node.
2122
2123 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2124 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2125 the GCC tree for an optional initial expression; NULL_TREE if none.
2126
2127 CONST_FLAG is true if this variable is constant, in which case we might
2128 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2129
2130 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2131 definition to be made visible outside of the current compilation unit, for
2132 instance variable definitions in a package specification.
2133
1e17ef87 2134 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2135 opposed to a definition: no storage is to be allocated for the variable).
2136
2137 STATIC_FLAG is only relevant when not at top level. In that case
2138 it indicates whether to always allocate storage to the variable.
2139
2140 GNAT_NODE is used for the position of the decl. */
2141
2142tree
2143create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2144 bool const_flag, bool public_flag, bool extern_flag,
2145 bool static_flag, bool const_decl_allowed_p,
2146 struct attrib *attr_list, Node_Id gnat_node)
2147{
50179d58
EB
2148 /* Whether the initializer is a constant initializer. At the global level
2149 or for an external object or an object to be allocated in static memory,
2150 we check that it is a valid constant expression for use in initializing
2151 a static variable; otherwise, we only check that it is constant. */
a1ab4c31
AC
2152 bool init_const
2153 = (var_init != 0
2154 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
50179d58 2155 && (global_bindings_p () || extern_flag || static_flag
a1ab4c31
AC
2156 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2157 : TREE_CONSTANT (var_init)));
2158
2159 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2160 case the initializer may be used in-lieu of the DECL node (as done in
2161 Identifier_to_gnu). This is useful to prevent the need of elaboration
2162 code when an identifier for which such a decl is made is in turn used as
2163 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2164 but extra constraints apply to this choice (see below) and are not
2165 relevant to the distinction we wish to make. */
2166 bool constant_p = const_flag && init_const;
2167
2168 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2169 and may be used for scalars in general but not for aggregates. */
2170 tree var_decl
c172df28
AH
2171 = build_decl (input_location,
2172 (constant_p && const_decl_allowed_p
a1ab4c31
AC
2173 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2174 var_name, type);
2175
2176 /* If this is external, throw away any initializations (they will be done
2177 elsewhere) unless this is a constant for which we would like to remain
2178 able to get the initializer. If we are defining a global here, leave a
2179 constant initialization and save any variable elaborations for the
2180 elaboration routine. If we are just annotating types, throw away the
2181 initialization if it isn't a constant. */
2182 if ((extern_flag && !constant_p)
2183 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2184 var_init = NULL_TREE;
2185
2186 /* At the global level, an initializer requiring code to be generated
2187 produces elaboration statements. Check that such statements are allowed,
2188 that is, not violating a No_Elaboration_Code restriction. */
3b9e8343 2189 if (global_bindings_p () && var_init != 0 && !init_const)
a1ab4c31 2190 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2191
8b7b0c36
JH
2192 DECL_INITIAL (var_decl) = var_init;
2193 TREE_READONLY (var_decl) = const_flag;
2194 DECL_EXTERNAL (var_decl) = extern_flag;
2195 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2196 TREE_CONSTANT (var_decl) = constant_p;
2197 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2198 = TYPE_VOLATILE (type);
a1ab4c31
AC
2199
2200 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2201 try to fiddle with DECL_COMMON. However, on platforms that don't
2202 support global BSS sections, uninitialized global variables would
2203 go in DATA instead, thus increasing the size of the executable. */
2204 if (!flag_no_common
2205 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2206 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2207 && !have_global_bss_p ())
2208 DECL_COMMON (var_decl) = 1;
a1ab4c31 2209
2231f17f
EB
2210 /* At the global binding level, we need to allocate static storage for the
2211 variable if it isn't external. Otherwise, we allocate automatic storage
2212 unless requested not to. */
a1ab4c31 2213 TREE_STATIC (var_decl)
2231f17f 2214 = !extern_flag && (static_flag || global_bindings_p ());
a1ab4c31 2215
5225a138
EB
2216 /* For an external constant whose initializer is not absolute, do not emit
2217 debug info. In DWARF this would mean a global relocation in a read-only
c01fe451 2218 section which runs afoul of the PE-COFF run-time relocation mechanism. */
5225a138
EB
2219 if (extern_flag
2220 && constant_p
50179d58 2221 && var_init
5225a138
EB
2222 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2223 != null_pointer_node)
2224 DECL_IGNORED_P (var_decl) = 1;
2225
a1ab4c31
AC
2226 if (TREE_SIDE_EFFECTS (var_decl))
2227 TREE_ADDRESSABLE (var_decl) = 1;
2228
74746d49
EB
2229 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2230 if (TREE_CODE (var_decl) == VAR_DECL)
2231 process_attributes (&var_decl, &attr_list, true, gnat_node);
2232
2233 /* Add this decl to the current binding level. */
2234 gnat_pushdecl (var_decl, gnat_node);
2235
58c8f770 2236 if (TREE_CODE (var_decl) == VAR_DECL)
a1ab4c31 2237 {
58c8f770
EB
2238 if (asm_name)
2239 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
74746d49 2240
a1ab4c31
AC
2241 if (global_bindings_p ())
2242 rest_of_decl_compilation (var_decl, true, 0);
2243 }
a1ab4c31
AC
2244
2245 return var_decl;
2246}
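
/* A minimal sketch of the typical call for a library-level constant with a
   static initializer; the my_const name is illustrative and GNAT_NODE
   stands for the Ada node at hand.  Since the type is scalar, the
   initializer is constant and CONST_DECL_ALLOWED_P is true, a CONST_DECL
   is returned:

     create_var_decl_1 (get_identifier ("my_const"), NULL_TREE,
			integer_type_node,
			build_int_cst (integer_type_node, 7),
			true, false, false, false, true, NULL, gnat_node);
   */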
2247\f
2248/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2249
2250static bool
2251aggregate_type_contains_array_p (tree type)
2252{
2253 switch (TREE_CODE (type))
2254 {
2255 case RECORD_TYPE:
2256 case UNION_TYPE:
2257 case QUAL_UNION_TYPE:
2258 {
2259 tree field;
910ad8de 2260 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2261 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2262 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2263 return true;
2264 return false;
2265 }
2266
2267 case ARRAY_TYPE:
2268 return true;
2269
2270 default:
2271 gcc_unreachable ();
2272 }
2273}
2274
62f9f3ce 2275/* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
da01bfee
EB
2276 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2277 nonzero, it is the specified size of the field. If POS is nonzero, it is
2278 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2279 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2280 means we are allowed to take the address of the field; if it is negative,
2281 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2282
2283tree
2284create_field_decl (tree field_name, tree field_type, tree record_type,
da01bfee 2285 tree size, tree pos, int packed, int addressable)
a1ab4c31 2286{
c172df28
AH
2287 tree field_decl = build_decl (input_location,
2288 FIELD_DECL, field_name, field_type);
a1ab4c31
AC
2289
2290 DECL_CONTEXT (field_decl) = record_type;
2291 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2292
2293 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2294 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2295 Likewise for an aggregate without specified position that contains an
2296 array, because in this case slices of variable length of this array
2297 must be handled by GCC and variable-sized objects need to be aligned
2298 to at least a byte boundary. */
2299 if (packed && (TYPE_MODE (field_type) == BLKmode
2300 || (!pos
2301 && AGGREGATE_TYPE_P (field_type)
2302 && aggregate_type_contains_array_p (field_type))))
2303 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2304
2305 /* If a size is specified, use it. Otherwise, if the record type is packed
2306 compute a size to use, which may differ from the object's natural size.
2307 We always set a size in this case to trigger the checks for bitfield
2308 creation below, which is typically required when no position has been
2309 specified. */
2310 if (size)
2311 size = convert (bitsizetype, size);
2312 else if (packed == 1)
2313 {
2314 size = rm_size (field_type);
62f9f3ce
EB
2315 if (TYPE_MODE (field_type) == BLKmode)
2316 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2317 }
2318
2319 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2320 specified for two reasons: first if the size differs from the natural
2321 size. Second, if the alignment is insufficient. There are a number of
2322 ways the latter can be true.
2323
2324 We never make a bitfield if the type of the field has a nonconstant size,
2325 because no such entity requiring bitfield operations should reach here.
2326
2327 We do *preventively* make a bitfield when there might be the need for it
2328 but we don't have all the necessary information to decide, as is the case
2329 of a field with no specified position in a packed record.
2330
2331 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2332 in layout_decl or finish_record_type to clear the bit_field indication if
2333 it is in fact not needed. */
2334 if (addressable >= 0
2335 && size
2336 && TREE_CODE (size) == INTEGER_CST
2337 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2338 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2339 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2340 || packed
2341 || (TYPE_ALIGN (record_type) != 0
2342 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2343 {
2344 DECL_BIT_FIELD (field_decl) = 1;
2345 DECL_SIZE (field_decl) = size;
2346 if (!packed && !pos)
feec4372
EB
2347 {
2348 if (TYPE_ALIGN (record_type) != 0
2349 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2350 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2351 else
2352 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2353 }
a1ab4c31
AC
2354 }
2355
2356 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2357
2358 /* Bump the alignment if need be, either for bitfield/packing purposes or
2359 to satisfy the type requirements if no such consideration applies. When
2360 we get the alignment from the type, indicate if this is from an explicit
2361 user request, which prevents stor-layout from lowering it later on. */
2362 {
d9223014 2363 unsigned int bit_align
a1ab4c31
AC
2364 = (DECL_BIT_FIELD (field_decl) ? 1
2365 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2366
2367 if (bit_align > DECL_ALIGN (field_decl))
2368 DECL_ALIGN (field_decl) = bit_align;
2369 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2370 {
2371 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2372 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2373 }
2374 }
2375
2376 if (pos)
2377 {
2378 /* We need to pass in the alignment the DECL is known to have.
2379 This is the lowest-order bit set in POS, but no more than
2380 the alignment of the record, if one is specified. Note
2381 that an alignment of 0 is taken as infinite. */
2382 unsigned int known_align;
2383
cc269bb6 2384 if (tree_fits_uhwi_p (pos))
ae7e9ddd 2385 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
a1ab4c31
AC
2386 else
2387 known_align = BITS_PER_UNIT;
2388
2389 if (TYPE_ALIGN (record_type)
2390 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2391 known_align = TYPE_ALIGN (record_type);
2392
2393 layout_decl (field_decl, known_align);
2394 SET_DECL_OFFSET_ALIGN (field_decl,
cc269bb6 2395 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
a1ab4c31
AC
2396 : BITS_PER_UNIT);
2397 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2398 &DECL_FIELD_BIT_OFFSET (field_decl),
2399 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2400 }
2401
2402 /* In addition to what our caller says, claim the field is addressable if we
2403 know that its type is not suitable.
2404
2405 The field may also be "technically" nonaddressable, meaning that even if
2406 we attempt to take the field's address we will actually get the address
2407 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2408 value we have at this point is not accurate enough, so we don't account
2409 for this here and let finish_record_type decide. */
4c5a0615 2410 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
2411 addressable = 1;
2412
2413 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2414
2415 return field_decl;
2416}
2417\f
a8e05f92
EB
2418/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2419 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2420 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2421
2422tree
2423create_param_decl (tree param_name, tree param_type, bool readonly)
2424{
c172df28
AH
2425 tree param_decl = build_decl (input_location,
2426 PARM_DECL, param_name, param_type);
a1ab4c31 2427
a8e05f92
EB
2428 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2429 can lead to various ABI violations. */
2430 if (targetm.calls.promote_prototypes (NULL_TREE)
2431 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
2432 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2433 {
2434 /* We have to be careful about biased types here. Make a subtype
2435 of integer_type_node with the proper biasing. */
2436 if (TREE_CODE (param_type) == INTEGER_TYPE
2437 && TYPE_BIASED_REPRESENTATION_P (param_type))
2438 {
84fb43a1
EB
2439 tree subtype
2440 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2441 TREE_TYPE (subtype) = integer_type_node;
2442 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
2443 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2444 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2445 param_type = subtype;
a1ab4c31
AC
2446 }
2447 else
2448 param_type = integer_type_node;
2449 }
2450
2451 DECL_ARG_TYPE (param_decl) = param_type;
2452 TREE_READONLY (param_decl) = readonly;
2453 return param_decl;
2454}
2455\f
74746d49
EB
2456/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2457 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2458 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2459
74746d49
EB
2460void
2461process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2462 Node_Id gnat_node)
a1ab4c31 2463{
74746d49
EB
2464 struct attrib *attr;
2465
2466 for (attr = *attr_list; attr; attr = attr->next)
2467 switch (attr->type)
a1ab4c31
AC
2468 {
2469 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2470 Sloc_to_locus (Sloc (gnat_node), &input_location);
2471 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2472 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2473 break;
2474
2475 case ATTR_LINK_ALIAS:
74746d49 2476 if (!DECL_EXTERNAL (*node))
a1ab4c31 2477 {
74746d49
EB
2478 TREE_STATIC (*node) = 1;
2479 assemble_alias (*node, attr->name);
a1ab4c31
AC
2480 }
2481 break;
2482
2483 case ATTR_WEAK_EXTERNAL:
2484 if (SUPPORTS_WEAK)
74746d49 2485 declare_weak (*node);
a1ab4c31
AC
2486 else
2487 post_error ("?weak declarations not supported on this target",
74746d49 2488 attr->error_point);
a1ab4c31
AC
2489 break;
2490
2491 case ATTR_LINK_SECTION:
677f3fa8 2492 if (targetm_common.have_named_sections)
a1ab4c31 2493 {
74746d49
EB
2494 DECL_SECTION_NAME (*node)
2495 = build_string (IDENTIFIER_LENGTH (attr->name),
2496 IDENTIFIER_POINTER (attr->name));
2497 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2498 }
2499 else
2500 post_error ("?section attributes are not supported for this target",
74746d49 2501 attr->error_point);
a1ab4c31
AC
2502 break;
2503
2504 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2505 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2506 TREE_USED (*node) = 1;
a1ab4c31
AC
2507 break;
2508
2509 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2510 DECL_STATIC_DESTRUCTOR (*node) = 1;
2511 TREE_USED (*node) = 1;
a1ab4c31 2512 break;
40a14772
TG
2513
2514 case ATTR_THREAD_LOCAL_STORAGE:
74746d49
EB
2515 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2516 DECL_COMMON (*node) = 0;
40a14772 2517 break;
a1ab4c31 2518 }
74746d49
EB
2519
2520 *attr_list = NULL;
a1ab4c31
AC
2521}
2522\f
feec4372 2523/* Record DECL as a global renaming pointer. */
a1ab4c31
AC
2524
2525void
2526record_global_renaming_pointer (tree decl)
2527{
15bf7d19 2528 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
9771b263 2529 vec_safe_push (global_renaming_pointers, decl);
a1ab4c31
AC
2530}
2531
2532/* Invalidate the global renaming pointers. */
2533
2534void
2535invalidate_global_renaming_pointers (void)
2536{
2537 unsigned int i;
2538 tree iter;
2539
9771b263
DN
2540 if (global_renaming_pointers == NULL)
2541 return;
2542
2543 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
a1ab4c31
AC
2544 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2545
9771b263 2546 vec_free (global_renaming_pointers);
a1ab4c31
AC
2547}
2548
2549/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2550 a power of 2. */
2551
2552bool
2553value_factor_p (tree value, HOST_WIDE_INT factor)
2554{
cc269bb6 2555 if (tree_fits_uhwi_p (value))
ae7e9ddd 2556 return tree_to_uhwi (value) % factor == 0;
a1ab4c31
AC
2557
2558 if (TREE_CODE (value) == MULT_EXPR)
2559 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2560 || value_factor_p (TREE_OPERAND (value, 1), factor));
2561
2562 return false;
2563}
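
/* For instance, value_factor_p returns true for the constant 24 with
   FACTOR 8, and also for an expression such as N * 16 with FACTOR 8,
   since one operand of the multiplication is itself a multiple of 8.  */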
2564
5c475ba9
EB
2565/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2566
2567static unsigned int
2568scale_by_factor_of (tree expr, unsigned int value)
2569{
2570 expr = remove_conversions (expr, true);
2571
2572 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2573 corresponding to the number of trailing zeros of the mask. */
2574 if (TREE_CODE (expr) == BIT_AND_EXPR
2575 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2576 {
2577 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2578 unsigned int i = 0;
2579
2580 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2581 {
2582 mask >>= 1;
2583 value *= 2;
2584 i++;
2585 }
2586 }
2587
2588 return value;
2589}
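
/* For instance, if EXPR is OFFSET & -8, i.e. a mask clearing the three low
   bits, the factor is 8 and a VALUE of 1 is scaled up to 8.  */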
2590
7d7fcb08 2591/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
2592 unless we can prove these 2 fields are laid out in such a way that no gap
2593 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2594 is the distance in bits between the end of PREV_FIELD and the starting
2595 position of CURR_FIELD. It is ignored if null. */
2596
2597static bool
2598potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2599{
2600  /* If this is the first field of the record, there cannot be any gap.  */
2601 if (!prev_field)
2602 return false;
2603
78df6221 2604 /* If the previous field is a union type, then return false: The only
a1ab4c31
AC
2605 time when such a field is not the last field of the record is when
2606 there are other components at fixed positions after it (meaning there
2607 was a rep clause for every field), in which case we don't want the
2608 alignment constraint to override them. */
2609 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2610 return false;
2611
2612 /* If the distance between the end of prev_field and the beginning of
2613 curr_field is constant, then there is a gap if the value of this
2614 constant is not null. */
cc269bb6 2615 if (offset && tree_fits_uhwi_p (offset))
a1ab4c31
AC
2616 return !integer_zerop (offset);
2617
2618 /* If the size and position of the previous field are constant,
2619 then check the sum of this size and position. There will be a gap
2620 iff it is not multiple of the current field alignment. */
cc269bb6
RS
2621 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2622 && tree_fits_uhwi_p (bit_position (prev_field)))
ae7e9ddd
RS
2623 return ((tree_to_uhwi (bit_position (prev_field))
2624 + tree_to_uhwi (DECL_SIZE (prev_field)))
a1ab4c31
AC
2625 % DECL_ALIGN (curr_field) != 0);
2626
2627 /* If both the position and size of the previous field are multiples
2628 of the current field alignment, there cannot be any gap. */
2629 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2630 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2631 return false;
2632
2633  /* Fallback: return that there may be a potential gap.  */
2634 return true;
2635}
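
/* For instance, if PREV_FIELD starts at bit 0 with a size of 24 bits and
   CURR_FIELD requires 32-bit alignment, 0 + 24 is not a multiple of 32 and
   a potential gap is reported; with a size of 32 bits instead, no gap is
   possible and false is returned.  */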
2636
88a94e2b
EB
2637/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2638 of the decl. */
a1ab4c31
AC
2639
2640tree
88a94e2b 2641create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 2642{
88a94e2b
EB
2643 tree label_decl
2644 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 2645
88a94e2b
EB
2646 DECL_MODE (label_decl) = VOIDmode;
2647
2648 /* Add this decl to the current binding level. */
2649 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
2650
2651 return label_decl;
2652}
2653\f
7d7fcb08 2654/* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
a1ab4c31
AC
2655 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2656 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
7d76717d 2657 PARM_DECL nodes chained through the DECL_CHAIN field).
a1ab4c31 2658
0e24192c 2659 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
7d7fcb08
EB
2660 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2661 used for the position of the decl. */
a1ab4c31
AC
2662
2663tree
7d7fcb08 2664create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
0e24192c
EB
2665 tree param_decl_list, enum inline_status_t inline_status,
2666 bool public_flag, bool extern_flag, bool artificial_flag,
7d7fcb08 2667 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 2668{
d47d0a8d
EB
2669 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2670 subprog_type);
2671 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2672 TREE_TYPE (subprog_type));
7d7fcb08 2673 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 2674
d84b344a
EB
2675 /* If this is a non-inline function nested inside an inlined external
2676 function, we cannot honor both requests without cloning the nested
2677 function in the current unit since it is private to the other unit.
2678 We could inline the nested function as well but it's probably better
2679 to err on the side of too little inlining. */
0e24192c 2680 if (inline_status != is_enabled
5daed84a 2681 && !public_flag
d84b344a
EB
2682 && current_function_decl
2683 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 2684 && DECL_EXTERNAL (current_function_decl))
d84b344a 2685 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31 2686
7d7fcb08
EB
2687 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2688 DECL_EXTERNAL (subprog_decl) = extern_flag;
0e24192c
EB
2689
2690 switch (inline_status)
2691 {
2692 case is_suppressed:
2693 DECL_UNINLINABLE (subprog_decl) = 1;
2694 break;
2695
2696 case is_disabled:
2697 break;
2698
2699 case is_enabled:
2700 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2701 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
2702 break;
2703
2704 default:
2705 gcc_unreachable ();
2706 }
7d7fcb08
EB
2707
2708 TREE_PUBLIC (subprog_decl) = public_flag;
2709 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
a1ab4c31
AC
2710 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2711 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
a1ab4c31 2712
d47d0a8d
EB
2713 DECL_ARTIFICIAL (result_decl) = 1;
2714 DECL_IGNORED_P (result_decl) = 1;
2715 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2716 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 2717
a1ab4c31
AC
2718 if (asm_name)
2719 {
2720 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2721
2722 /* The expand_main_function circuitry expects "main_identifier_node" to
2723 designate the DECL_NAME of the 'main' entry point, in turn expected
2724 to be declared as the "main" function literally by default. Ada
2725 program entry points are typically declared with a different name
2726 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 2727 system expectations. Force main_identifier_node in this case. */
a1ab4c31 2728 if (asm_name == main_identifier_node)
cfbb663c 2729 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
2730 }
2731
74746d49
EB
2732 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
2733
a1ab4c31
AC
2734 /* Add this decl to the current binding level. */
2735 gnat_pushdecl (subprog_decl, gnat_node);
2736
2737 /* Output the assembler code and/or RTL for the declaration. */
2738 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2739
2740 return subprog_decl;
2741}
2742\f
2743/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2744 body. This routine needs to be invoked before processing the declarations
2745 appearing in the subprogram. */
2746
2747void
2748begin_subprog_body (tree subprog_decl)
2749{
2750 tree param_decl;
2751
a1ab4c31
AC
2752 announce_function (subprog_decl);
2753
0ae44446
JR
2754 /* This function is being defined. */
2755 TREE_STATIC (subprog_decl) = 1;
2756
58c8f770
EB
2757 current_function_decl = subprog_decl;
2758
a1ab4c31
AC
2759 /* Enter a new binding level and show that all the parameters belong to
2760 this function. */
2761 gnat_pushlevel ();
a09d56d8 2762
a1ab4c31 2763 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 2764 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
2765 DECL_CONTEXT (param_decl) = subprog_decl;
2766
2767 make_decl_rtl (subprog_decl);
a1ab4c31
AC
2768}
2769
71196d4e 2770/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
2771
2772void
a406865a 2773end_subprog_body (tree body)
a1ab4c31
AC
2774{
2775 tree fndecl = current_function_decl;
2776
bd9c7fb9 2777 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
2778 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2779 DECL_INITIAL (fndecl) = current_binding_level->block;
2780 gnat_poplevel ();
2781
a1ab4c31
AC
2782 /* Mark the RESULT_DECL as being in this subprogram. */
2783 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2784
a963da4d
EB
2785 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2786 if (TREE_CODE (body) == BIND_EXPR)
2787 {
2788 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2789 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2790 }
2791
a1ab4c31
AC
2792 DECL_SAVED_TREE (fndecl) = body;
2793
228ee426 2794 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
2795}
2796
2797/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 2798
71196d4e
EB
2799void
2800rest_of_subprog_body_compilation (tree subprog_decl)
2801{
a1ab4c31
AC
2802 /* We cannot track the location of errors past this point. */
2803 error_gnat_node = Empty;
2804
2805 /* If we're only annotating types, don't actually compile this function. */
2806 if (type_annotate_only)
2807 return;
2808
a406865a 2809 /* Dump functions before gimplification. */
71196d4e 2810 dump_function (TDI_original, subprog_decl);
a406865a 2811
228ee426 2812 if (!decl_function_context (subprog_decl))
71196d4e 2813 cgraph_finalize_function (subprog_decl, false);
a1ab4c31
AC
2814 else
2815 /* Register this function with cgraph just far enough to get it
2816 added to our parent's nested function list. */
71196d4e 2817 (void) cgraph_get_create_node (subprog_decl);
a1ab4c31
AC
2818}
2819
a1ab4c31
AC
2820tree
2821gnat_builtin_function (tree decl)
2822{
2823 gnat_pushdecl (decl, Empty);
2824 return decl;
2825}
2826
2827/* Return an integer type with the number of bits of precision given by
2828 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2829 it is a signed type. */
2830
2831tree
2832gnat_type_for_size (unsigned precision, int unsignedp)
2833{
2834 tree t;
2835 char type_name[20];
2836
2837 if (precision <= 2 * MAX_BITS_PER_WORD
2838 && signed_and_unsigned_types[precision][unsignedp])
2839 return signed_and_unsigned_types[precision][unsignedp];
2840
2841 if (unsignedp)
2842 t = make_unsigned_type (precision);
2843 else
2844 t = make_signed_type (precision);
2845
2846 if (precision <= 2 * MAX_BITS_PER_WORD)
2847 signed_and_unsigned_types[precision][unsignedp] = t;
2848
2849 if (!TYPE_NAME (t))
2850 {
c3d79c60 2851 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
2852 TYPE_NAME (t) = get_identifier (type_name);
2853 }
2854
2855 return t;
2856}
2857
2858/* Likewise for floating-point types. */
2859
2860static tree
2861float_type_for_precision (int precision, enum machine_mode mode)
2862{
2863 tree t;
2864 char type_name[20];
2865
2866 if (float_types[(int) mode])
2867 return float_types[(int) mode];
2868
2869 float_types[(int) mode] = t = make_node (REAL_TYPE);
2870 TYPE_PRECISION (t) = precision;
2871 layout_type (t);
2872
2873 gcc_assert (TYPE_MODE (t) == mode);
2874 if (!TYPE_NAME (t))
2875 {
2876 sprintf (type_name, "FLOAT_%d", precision);
2877 TYPE_NAME (t) = get_identifier (type_name);
2878 }
2879
2880 return t;
2881}
2882
2883/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2884 an unsigned type; otherwise a signed type is returned. */
2885
2886tree
2887gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2888{
2889 if (mode == BLKmode)
2890 return NULL_TREE;
2799d18c
EB
2891
2892 if (mode == VOIDmode)
a1ab4c31 2893 return void_type_node;
2799d18c
EB
2894
2895 if (COMPLEX_MODE_P (mode))
a1ab4c31 2896 return NULL_TREE;
2799d18c
EB
2897
2898 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 2899 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
2900
2901 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 2902 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
2903
2904 if (VECTOR_MODE_P (mode))
2905 {
2906 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2907 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2908 if (inner_type)
2909 return build_vector_type_for_mode (inner_type, mode);
2910 }
2911
2912 return NULL_TREE;
a1ab4c31
AC
2913}
2914
2915/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2916
2917tree
2918gnat_unsigned_type (tree type_node)
2919{
2920 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2921
2922 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2923 {
2924 type = copy_node (type);
2925 TREE_TYPE (type) = type_node;
2926 }
2927 else if (TREE_TYPE (type_node)
2928 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2929 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2930 {
2931 type = copy_node (type);
2932 TREE_TYPE (type) = TREE_TYPE (type_node);
2933 }
2934
2935 return type;
2936}
2937
2938/* Return the signed version of a TYPE_NODE, a scalar type. */
2939
2940tree
2941gnat_signed_type (tree type_node)
2942{
2943 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2944
2945 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2946 {
2947 type = copy_node (type);
2948 TREE_TYPE (type) = type_node;
2949 }
2950 else if (TREE_TYPE (type_node)
2951 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2952 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2953 {
2954 type = copy_node (type);
2955 TREE_TYPE (type) = TREE_TYPE (type_node);
2956 }
2957
2958 return type;
2959}
2960
2961/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2962 transparently converted to each other. */
2963
2964int
2965gnat_types_compatible_p (tree t1, tree t2)
2966{
2967 enum tree_code code;
2968
2969 /* This is the default criterion. */
2970 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2971 return 1;
2972
2973 /* We only check structural equivalence here. */
2974 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2975 return 0;
2976
7948ae37
OH
2977 /* Vector types are also compatible if they have the same number of subparts
2978 and the same form of (scalar) element type. */
2979 if (code == VECTOR_TYPE
2980 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2981 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2982 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2983 return 1;
2984
cfa0bd19 2985 /* Array types are also compatible if they are constrained and have the same
96bba5e6 2986 domain(s) and the same component type. */
a1ab4c31 2987 if (code == ARRAY_TYPE
0adef32b
JJ
2988 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2989 || (TYPE_DOMAIN (t1)
b4680ca1 2990 && TYPE_DOMAIN (t2)
0adef32b
JJ
2991 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2992 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2993 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 2994 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
2995 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2996 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2997 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
2998 return 1;
2999
a1ab4c31
AC
3000 return 0;
3001}
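/* Illustrative note (editorial addition, not part of the original source):
   under the criteria above, two distinct constrained array type nodes whose
   domains both run from 1 to 10 and whose component type is the same tree
   are considered compatible, because only the bounds and the component type
   are compared, not the identity of the array type nodes themselves.  */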
523e82a7 3002
71196d4e
EB
3003/* Return true if EXPR is a useless type conversion. */
3004
3005bool
3006gnat_useless_type_conversion (tree expr)
3007{
3008 if (CONVERT_EXPR_P (expr)
3009 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3010 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3011 return gnat_types_compatible_p (TREE_TYPE (expr),
3012 TREE_TYPE (TREE_OPERAND (expr, 0)));
3013
3014 return false;
3015}
3016
523e82a7
EB
3017/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3018
3019bool
3020fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3021 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3022{
3023 return TYPE_CI_CO_LIST (t) == cico_list
3024 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3025 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3026 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3027}
a1ab4c31
AC
3028\f
3029/* EXP is an expression for the size of an object. If this size contains
3030 discriminant references, replace them with the maximum (if MAX_P) or
3031 minimum (if !MAX_P) possible value of the discriminant. */
3032
3033tree
3034max_size (tree exp, bool max_p)
3035{
3036 enum tree_code code = TREE_CODE (exp);
3037 tree type = TREE_TYPE (exp);
3038
3039 switch (TREE_CODE_CLASS (code))
3040 {
3041 case tcc_declaration:
3042 case tcc_constant:
3043 return exp;
3044
3045 case tcc_vl_exp:
3046 if (code == CALL_EXPR)
3047 {
f82a627c
EB
3048 tree t, *argarray;
3049 int n, i;
3050
3051 t = maybe_inline_call_in_expr (exp);
3052 if (t)
3053 return max_size (t, max_p);
a1ab4c31 3054
f82a627c
EB
3055 n = call_expr_nargs (exp);
3056 gcc_assert (n > 0);
2bb1fc26 3057 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3058 for (i = 0; i < n; i++)
3059 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3060 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3061 }
3062 break;
3063
3064 case tcc_reference:
3065 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3066 modify. Otherwise, we treat it like a variable. */
3067 if (!CONTAINS_PLACEHOLDER_P (exp))
3068 return exp;
3069
3070 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3071 return
3072 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3073
3074 case tcc_comparison:
3075 return max_p ? size_one_node : size_zero_node;
3076
3077 case tcc_unary:
ce3da0d0
EB
3078 if (code == NON_LVALUE_EXPR)
3079 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3080
ce3da0d0
EB
3081 return fold_build1 (code, type,
3082 max_size (TREE_OPERAND (exp, 0),
3083 code == NEGATE_EXPR ? !max_p : max_p));
3084
a1ab4c31 3085 case tcc_binary:
ce3da0d0
EB
3086 {
3087 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3088 tree rhs = max_size (TREE_OPERAND (exp, 1),
3089 code == MINUS_EXPR ? !max_p : max_p);
3090
3091 /* Special-case wanting the maximum value of a MIN_EXPR.
3092 In that case, if one side overflows, return the other. */
3093 if (max_p && code == MIN_EXPR)
3094 {
3095 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3096 return lhs;
3097
3098 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3099 return rhs;
3100 }
3101
3102 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3103 overflowing and the RHS a variable. */
3104 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3105 && TREE_CODE (lhs) == INTEGER_CST
3106 && TREE_OVERFLOW (lhs)
3107 && !TREE_CONSTANT (rhs))
3108 return lhs;
3109
3110 return size_binop (code, lhs, rhs);
3111 }
3112
a1ab4c31
AC
3113 case tcc_expression:
3114 switch (TREE_CODE_LENGTH (code))
3115 {
3116 case 1:
722356ce
EB
3117 if (code == SAVE_EXPR)
3118 return exp;
ce3da0d0
EB
3119
3120 return fold_build1 (code, type,
3121 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3122
3123 case 2:
3124 if (code == COMPOUND_EXPR)
3125 return max_size (TREE_OPERAND (exp, 1), max_p);
3126
ce3da0d0
EB
3127 return fold_build2 (code, type,
3128 max_size (TREE_OPERAND (exp, 0), max_p),
3129 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3130
3131 case 3:
722356ce 3132 if (code == COND_EXPR)
a1ab4c31
AC
3133 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3134 max_size (TREE_OPERAND (exp, 1), max_p),
3135 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3136
3137 default:
3138 break;
a1ab4c31
AC
3139 }
3140
3141 /* Other tree classes cannot happen. */
3142 default:
3143 break;
3144 }
3145
3146 gcc_unreachable ();
3147}
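/* Illustrative example (editorial addition, not part of the original source):
   given a size expression of roughly the form

     MIN_EXPR <16, D * 8>

   where D is a reference to a discriminant whose subtype runs over 1 .. 4,
   max_size (exp, true) substitutes the upper bound 4 for the discriminant
   reference and folds the result to MIN (16, 32), i.e. 16, the largest size
   an object of the type can have.  */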
3148\f
3149/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3150 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3151 Return a constructor for the template. */
3152
3153tree
3154build_template (tree template_type, tree array_type, tree expr)
3155{
9771b263 3156 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3157 tree bound_list = NULL_TREE;
3158 tree field;
3159
3160 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3161 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3162 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3163 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3164
3165 if (TREE_CODE (array_type) == ARRAY_TYPE
3166 || (TREE_CODE (array_type) == INTEGER_TYPE
3167 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3168 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3169
3170 /* First make the list for a CONSTRUCTOR for the template. Go down the
3171 field list of the template instead of the type chain because this
3172 array might be an Ada array of arrays and we can't tell where the
3173 nested arrays stop being the underlying object. */
3174
3175 for (field = TYPE_FIELDS (template_type); field;
3176 (bound_list
3177 ? (bound_list = TREE_CHAIN (bound_list))
3178 : (array_type = TREE_TYPE (array_type))),
910ad8de 3179 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3180 {
3181 tree bounds, min, max;
3182
3183 /* If we have a bound list, get the bounds from there. Likewise
3184 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3185 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3186 This will give us a maximum range. */
3187 if (bound_list)
3188 bounds = TREE_VALUE (bound_list);
3189 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3190 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3191 else if (expr && TREE_CODE (expr) == PARM_DECL
3192 && DECL_BY_COMPONENT_PTR_P (expr))
3193 bounds = TREE_TYPE (field);
3194 else
3195 gcc_unreachable ();
3196
3197 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3198 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3199
3200 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3201 substitute it from OBJECT. */
3202 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3203 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3204
0e228dd9 3205 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3206 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3207 }
3208
0e228dd9 3209 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3210}
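/* Illustrative note (editorial addition, not part of the original source):
   for each dimension the loop above appends two constructor elements,
   pairing the template's low-bound field with TYPE_MIN_VALUE and its
   high-bound field with TYPE_MAX_VALUE of the corresponding bounds, after
   substituting any PLACEHOLDER_EXPR with EXPR; the result is the bounds
   record that the second field of a fat pointer designates.  */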
3211\f
31a5a547
EB
3212/* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3213 being built; the new decl is chained on to the front of the list. */
3214
3215static tree
3216make_descriptor_field (const char *name, tree type, tree rec_type,
3217 tree initial, tree field_list)
3218{
3219 tree field
3220 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3221 NULL_TREE, 0, 0);
3222
3223 DECL_INITIAL (field) = initial;
3224 DECL_CHAIN (field) = field_list;
3225 return field;
3226}
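/* Illustrative note (editorial addition, not part of the original source):
   the expression stored in DECL_INITIAL here is not emitted as an
   initializer; fill_vms_descriptor below retrieves it for each field,
   substitutes the PLACEHOLDER_EXPRs with the actual parameter, and uses the
   result as the constructor value for that field.  */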
3227
58c8f770
EB
3228/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3229 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3230 type contains in its DECL_INITIAL the expression to use when a constructor
3231 is made for the type. GNAT_ENTITY is an entity used to print out an error
3232 message if the mechanism cannot be applied to an object of that type and
3233 also for the name. */
a1ab4c31
AC
3234
3235tree
d628c015 3236build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
3237{
3238 tree record_type = make_node (RECORD_TYPE);
31a5a547 3239 tree pointer32_type, pointer64_type;
788e5046 3240 tree field_list = NULL_TREE;
31a5a547
EB
3241 int klass, ndim, i, dtype = 0;
3242 tree inner_type, tem;
a1ab4c31 3243 tree *idx_arr;
a1ab4c31
AC
3244
3245 /* If TYPE is an unconstrained array, use the underlying array type. */
3246 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3247 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3248
3249 /* If this is an array, compute the number of dimensions in the array,
3250 get the index types, and point to the inner type. */
3251 if (TREE_CODE (type) != ARRAY_TYPE)
3252 ndim = 0;
3253 else
3254 for (ndim = 1, inner_type = type;
3255 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3256 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3257 ndim++, inner_type = TREE_TYPE (inner_type))
3258 ;
3259
2bb1fc26 3260 idx_arr = XALLOCAVEC (tree, ndim);
a1ab4c31 3261
d628c015 3262 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
3263 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3264 for (i = ndim - 1, inner_type = type;
3265 i >= 0;
3266 i--, inner_type = TREE_TYPE (inner_type))
3267 idx_arr[i] = TYPE_DOMAIN (inner_type);
3268 else
3269 for (i = 0, inner_type = type;
3270 i < ndim;
3271 i++, inner_type = TREE_TYPE (inner_type))
3272 idx_arr[i] = TYPE_DOMAIN (inner_type);
3273
3274 /* Now get the DTYPE value. */
3275 switch (TREE_CODE (type))
3276 {
3277 case INTEGER_TYPE:
3278 case ENUMERAL_TYPE:
01ddebf2 3279 case BOOLEAN_TYPE:
a1ab4c31 3280 if (TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3281 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
a1ab4c31
AC
3282 {
3283 case 6:
3284 dtype = 10;
3285 break;
3286 case 9:
3287 dtype = 11;
3288 break;
3289 case 15:
3290 dtype = 27;
3291 break;
3292 }
3293 else
3294 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3295 {
3296 case 8:
3297 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3298 break;
3299 case 16:
3300 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3301 break;
3302 case 32:
3303 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3304 break;
3305 case 64:
3306 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3307 break;
3308 case 128:
3309 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3310 break;
3311 }
3312 break;
3313
3314 case REAL_TYPE:
3315 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3316 break;
3317
3318 case COMPLEX_TYPE:
3319 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3320 && TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3321 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
a1ab4c31
AC
3322 {
3323 case 6:
3324 dtype = 12;
3325 break;
3326 case 9:
3327 dtype = 13;
3328 break;
3329 case 15:
3330 dtype = 29;
3331 }
3332 else
3333 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3334 break;
3335
3336 case ARRAY_TYPE:
3337 dtype = 14;
3338 break;
3339
3340 default:
3341 break;
3342 }
3343
3344 /* Get the CLASS value. */
3345 switch (mech)
3346 {
3347 case By_Descriptor_A:
d628c015 3348 case By_Short_Descriptor_A:
c6bd4220 3349 klass = 4;
a1ab4c31
AC
3350 break;
3351 case By_Descriptor_NCA:
d628c015 3352 case By_Short_Descriptor_NCA:
c6bd4220 3353 klass = 10;
a1ab4c31
AC
3354 break;
3355 case By_Descriptor_SB:
d628c015 3356 case By_Short_Descriptor_SB:
c6bd4220 3357 klass = 15;
a1ab4c31
AC
3358 break;
3359 case By_Descriptor:
d628c015 3360 case By_Short_Descriptor:
a1ab4c31 3361 case By_Descriptor_S:
d628c015 3362 case By_Short_Descriptor_S:
a1ab4c31 3363 default:
c6bd4220 3364 klass = 1;
a1ab4c31
AC
3365 break;
3366 }
3367
58c8f770
EB
3368 /* Make the type for a descriptor for VMS. The first four fields are the
3369 same for all types. */
82b481ed
EB
3370 field_list
3371 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3372 size_in_bytes ((mech == By_Descriptor_A
3373 || mech == By_Short_Descriptor_A)
3374 ? inner_type : type),
3375 field_list);
3376 field_list
3377 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3378 size_int (dtype), field_list);
3379 field_list
3380 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3381 size_int (klass), field_list);
a1ab4c31 3382
a1ab4c31 3383 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
31a5a547
EB
3384 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3385
3386 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3387 that we cannot build a template call to the CE routine as it would get a
3388 wrong source location; instead we use a second placeholder for it. */
3389 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3390 build0 (PLACEHOLDER_EXPR, type));
3391 tem = build3 (COND_EXPR, pointer32_type,
f542c405
EB
3392 Pmode != SImode
3393 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3394 build_int_cstu (pointer64_type, 0x80000000))
3395 : boolean_false_node,
31a5a547
EB
3396 build0 (PLACEHOLDER_EXPR, void_type_node),
3397 convert (pointer32_type, tem));
a1ab4c31 3398
82b481ed 3399 field_list
31a5a547 3400 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
82b481ed 3401 field_list);
a1ab4c31
AC
3402
3403 switch (mech)
3404 {
3405 case By_Descriptor:
d628c015 3406 case By_Short_Descriptor:
a1ab4c31 3407 case By_Descriptor_S:
d628c015 3408 case By_Short_Descriptor_S:
a1ab4c31
AC
3409 break;
3410
3411 case By_Descriptor_SB:
d628c015 3412 case By_Short_Descriptor_SB:
82b481ed
EB
3413 field_list
3414 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3415 record_type,
3416 (TREE_CODE (type) == ARRAY_TYPE
3417 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3418 : size_zero_node),
3419 field_list);
3420 field_list
3421 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3422 record_type,
3423 (TREE_CODE (type) == ARRAY_TYPE
3424 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3425 : size_zero_node),
3426 field_list);
a1ab4c31
AC
3427 break;
3428
3429 case By_Descriptor_A:
d628c015 3430 case By_Short_Descriptor_A:
a1ab4c31 3431 case By_Descriptor_NCA:
d628c015 3432 case By_Short_Descriptor_NCA:
82b481ed
EB
3433 field_list
3434 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3435 record_type, size_zero_node, field_list);
3436
3437 field_list
3438 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3439 record_type, size_zero_node, field_list);
3440
82b481ed
EB
3441 field_list
3442 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3443 record_type,
3444 size_int ((mech == By_Descriptor_NCA
3445 || mech == By_Short_Descriptor_NCA)
3446 ? 0
3447 /* Set FL_COLUMN, FL_COEFF, and
3448 FL_BOUNDS. */
3449 : (TREE_CODE (type) == ARRAY_TYPE
3450 && TYPE_CONVENTION_FORTRAN_P
3451 (type)
3452 ? 224 : 192)),
3453 field_list);
3454
3455 field_list
3456 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3457 record_type, size_int (ndim), field_list);
3458
3459 field_list
3460 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3461 record_type, size_in_bytes (type),
3462 field_list);
a1ab4c31
AC
3463
3464 /* Now build a pointer to the 0,0,0... element. */
3465 tem = build0 (PLACEHOLDER_EXPR, type);
3466 for (i = 0, inner_type = type; i < ndim;
3467 i++, inner_type = TREE_TYPE (inner_type))
3468 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3469 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3470 NULL_TREE, NULL_TREE);
3471
82b481ed
EB
3472 field_list
3473 = make_descriptor_field ("A0", pointer32_type, record_type,
3474 build1 (ADDR_EXPR, pointer32_type, tem),
3475 field_list);
a1ab4c31
AC
3476
3477 /* Next come the addressing coefficients. */
3478 tem = size_one_node;
3479 for (i = 0; i < ndim; i++)
3480 {
3481 char fname[3];
3482 tree idx_length
3483 = size_binop (MULT_EXPR, tem,
3484 size_binop (PLUS_EXPR,
3485 size_binop (MINUS_EXPR,
3486 TYPE_MAX_VALUE (idx_arr[i]),
3487 TYPE_MIN_VALUE (idx_arr[i])),
3488 size_int (1)));
3489
d628c015
DR
3490 fname[0] = ((mech == By_Descriptor_NCA
3491 || mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31 3492 fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3493 field_list
3494 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3495 record_type, idx_length, field_list);
a1ab4c31 3496
d628c015 3497 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
3498 tem = idx_length;
3499 }
3500
3501 /* Finally here are the bounds. */
3502 for (i = 0; i < ndim; i++)
3503 {
3504 char fname[3];
3505
3506 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3507 field_list
3508 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3509 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3510 field_list);
a1ab4c31
AC
3511
3512 fname[0] = 'U';
82b481ed
EB
3513 field_list
3514 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3515 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3516 field_list);
a1ab4c31
AC
3517 }
3518 break;
3519
3520 default:
3521 post_error ("unsupported descriptor type for &", gnat_entity);
3522 }
3523
10069d53 3524 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
788e5046 3525 finish_record_type (record_type, nreverse (field_list), 0, false);
a1ab4c31
AC
3526 return record_type;
3527}
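/* Illustrative example (editorial addition, not part of the original source):
   for a constrained one-dimensional array passed with mechanism
   By_Descriptor_A, the code above produces DTYPE = 14 (array) and CLASS = 4,
   a LENGTH field holding the component size, the SCALE, DIGITS, AFLAGS,
   DIMCT and ARSIZE fields, the A0 pointer to element 0, one addressing
   coefficient field M0 and one pair of bounds fields L0/U0.  */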
3528
58c8f770
EB
3529/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3530 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3531 type contains in its DECL_INITIAL the expression to use when a constructor
3532 is made for the type. GNAT_ENTITY is an entity used to print out an error
3533 message if the mechanism cannot be applied to an object of that type and
3534 also for the name. */
6ca2b0a0
DR
3535
3536tree
d628c015 3537build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0 3538{
31a5a547 3539 tree record_type = make_node (RECORD_TYPE);
6ca2b0a0 3540 tree pointer64_type;
31a5a547
EB
3541 tree field_list = NULL_TREE;
3542 int klass, ndim, i, dtype = 0;
3543 tree inner_type, tem;
6ca2b0a0 3544 tree *idx_arr;
6ca2b0a0
DR
3545
3546 /* If TYPE is an unconstrained array, use the underlying array type. */
3547 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3548 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3549
3550 /* If this is an array, compute the number of dimensions in the array,
3551 get the index types, and point to the inner type. */
3552 if (TREE_CODE (type) != ARRAY_TYPE)
3553 ndim = 0;
3554 else
3555 for (ndim = 1, inner_type = type;
3556 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3557 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3558 ndim++, inner_type = TREE_TYPE (inner_type))
3559 ;
3560
2bb1fc26 3561 idx_arr = XALLOCAVEC (tree, ndim);
6ca2b0a0
DR
3562
3563 if (mech != By_Descriptor_NCA
3564 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3565 for (i = ndim - 1, inner_type = type;
3566 i >= 0;
3567 i--, inner_type = TREE_TYPE (inner_type))
3568 idx_arr[i] = TYPE_DOMAIN (inner_type);
3569 else
3570 for (i = 0, inner_type = type;
3571 i < ndim;
3572 i++, inner_type = TREE_TYPE (inner_type))
3573 idx_arr[i] = TYPE_DOMAIN (inner_type);
3574
3575 /* Now get the DTYPE value. */
3576 switch (TREE_CODE (type))
3577 {
3578 case INTEGER_TYPE:
3579 case ENUMERAL_TYPE:
01ddebf2 3580 case BOOLEAN_TYPE:
6ca2b0a0 3581 if (TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3582 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
6ca2b0a0
DR
3583 {
3584 case 6:
3585 dtype = 10;
3586 break;
3587 case 9:
3588 dtype = 11;
3589 break;
3590 case 15:
3591 dtype = 27;
3592 break;
3593 }
3594 else
3595 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3596 {
3597 case 8:
3598 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3599 break;
3600 case 16:
3601 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3602 break;
3603 case 32:
3604 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3605 break;
3606 case 64:
3607 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3608 break;
3609 case 128:
3610 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3611 break;
3612 }
3613 break;
3614
3615 case REAL_TYPE:
3616 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3617 break;
3618
3619 case COMPLEX_TYPE:
3620 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3621 && TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3622 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
6ca2b0a0
DR
3623 {
3624 case 6:
3625 dtype = 12;
3626 break;
3627 case 9:
3628 dtype = 13;
3629 break;
3630 case 15:
3631 dtype = 29;
3632 }
3633 else
3634 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3635 break;
3636
3637 case ARRAY_TYPE:
3638 dtype = 14;
3639 break;
3640
3641 default:
3642 break;
3643 }
3644
3645 /* Get the CLASS value. */
3646 switch (mech)
3647 {
3648 case By_Descriptor_A:
c6bd4220 3649 klass = 4;
6ca2b0a0
DR
3650 break;
3651 case By_Descriptor_NCA:
c6bd4220 3652 klass = 10;
6ca2b0a0
DR
3653 break;
3654 case By_Descriptor_SB:
c6bd4220 3655 klass = 15;
6ca2b0a0
DR
3656 break;
3657 case By_Descriptor:
3658 case By_Descriptor_S:
3659 default:
c6bd4220 3660 klass = 1;
6ca2b0a0
DR
3661 break;
3662 }
3663
58c8f770 3664 /* Make the type for a 64-bit descriptor for VMS. The first six fields
6ca2b0a0 3665 are the same for all types. */
31a5a547 3666 field_list
788e5046 3667 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
31a5a547
EB
3668 record_type, size_int (1), field_list);
3669 field_list
788e5046 3670 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
31a5a547
EB
3671 record_type, size_int (dtype), field_list);
3672 field_list
788e5046 3673 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
31a5a547
EB
3674 record_type, size_int (klass), field_list);
3675 field_list
788e5046 3676 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
f54ee980 3677 record_type, size_int (-1), field_list);
31a5a547 3678 field_list
788e5046 3679 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
31a5a547 3680 record_type,
788e5046 3681 size_in_bytes (mech == By_Descriptor_A
82b481ed 3682 ? inner_type : type),
31a5a547 3683 field_list);
6ca2b0a0
DR
3684
3685 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3686
31a5a547
EB
3687 field_list
3688 = make_descriptor_field ("POINTER", pointer64_type, record_type,
788e5046 3689 build_unary_op (ADDR_EXPR, pointer64_type,
82b481ed 3690 build0 (PLACEHOLDER_EXPR, type)),
31a5a547 3691 field_list);
6ca2b0a0
DR
3692
3693 switch (mech)
3694 {
3695 case By_Descriptor:
3696 case By_Descriptor_S:
3697 break;
3698
3699 case By_Descriptor_SB:
31a5a547 3700 field_list
788e5046 3701 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
31a5a547 3702 record_type,
788e5046
NF
3703 (TREE_CODE (type) == ARRAY_TYPE
3704 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
82b481ed 3705 : size_zero_node),
31a5a547
EB
3706 field_list);
3707 field_list
788e5046 3708 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
31a5a547 3709 record_type,
788e5046
NF
3710 (TREE_CODE (type) == ARRAY_TYPE
3711 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
82b481ed 3712 : size_zero_node),
31a5a547 3713 field_list);
6ca2b0a0
DR
3714 break;
3715
3716 case By_Descriptor_A:
3717 case By_Descriptor_NCA:
31a5a547 3718 field_list
788e5046 3719 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
31a5a547 3720 record_type, size_zero_node, field_list);
788e5046 3721
31a5a547 3722 field_list
788e5046 3723 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
31a5a547 3724 record_type, size_zero_node, field_list);
788e5046
NF
3725
3726 dtype = (mech == By_Descriptor_NCA
3727 ? 0
3728 /* Set FL_COLUMN, FL_COEFF, and
3729 FL_BOUNDS. */
3730 : (TREE_CODE (type) == ARRAY_TYPE
3731 && TYPE_CONVENTION_FORTRAN_P (type)
3732 ? 224 : 192));
31a5a547 3733 field_list
788e5046 3734 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
31a5a547
EB
3735 record_type, size_int (dtype),
3736 field_list);
6ca2b0a0 3737
31a5a547 3738 field_list
788e5046 3739 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
31a5a547 3740 record_type, size_int (ndim), field_list);
788e5046 3741
31a5a547 3742 field_list
788e5046 3743 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
31a5a547
EB
3744 record_type, size_int (0), field_list);
3745 field_list
788e5046 3746 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
31a5a547
EB
3747 record_type, size_in_bytes (type),
3748 field_list);
6ca2b0a0
DR
3749
3750 /* Now build a pointer to the 0,0,0... element. */
3751 tem = build0 (PLACEHOLDER_EXPR, type);
3752 for (i = 0, inner_type = type; i < ndim;
3753 i++, inner_type = TREE_TYPE (inner_type))
3754 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3755 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3756 NULL_TREE, NULL_TREE);
3757
31a5a547
EB
3758 field_list
3759 = make_descriptor_field ("A0", pointer64_type, record_type,
788e5046 3760 build1 (ADDR_EXPR, pointer64_type, tem),
31a5a547 3761 field_list);
6ca2b0a0
DR
3762
3763 /* Next come the addressing coefficients. */
3764 tem = size_one_node;
3765 for (i = 0; i < ndim; i++)
3766 {
3767 char fname[3];
3768 tree idx_length
3769 = size_binop (MULT_EXPR, tem,
3770 size_binop (PLUS_EXPR,
3771 size_binop (MINUS_EXPR,
3772 TYPE_MAX_VALUE (idx_arr[i]),
3773 TYPE_MIN_VALUE (idx_arr[i])),
3774 size_int (1)));
3775
3776 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3777 fname[1] = '0' + i, fname[2] = 0;
31a5a547 3778 field_list
788e5046 3779 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547 3780 record_type, idx_length, field_list);
6ca2b0a0
DR
3781
3782 if (mech == By_Descriptor_NCA)
3783 tem = idx_length;
3784 }
3785
3786 /* Finally here are the bounds. */
3787 for (i = 0; i < ndim; i++)
3788 {
3789 char fname[3];
3790
3791 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
31a5a547 3792 field_list
788e5046 3793 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3794 record_type,
3795 TYPE_MIN_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3796
3797 fname[0] = 'U';
31a5a547 3798 field_list
788e5046 3799 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3800 record_type,
3801 TYPE_MAX_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3802 }
3803 break;
3804
3805 default:
3806 post_error ("unsupported descriptor type for &", gnat_entity);
3807 }
3808
31a5a547
EB
3809 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3810 finish_record_type (record_type, nreverse (field_list), 0, false);
3811 return record_type;
6ca2b0a0
DR
3812}
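/* Illustrative note (editorial addition, not part of the original source):
   unlike the 32-bit variant above, the 64-bit descriptor carries an MBO
   field set to 1 and an MBMO field set to -1; convert_vms_descriptor below
   tests exactly these two values at run time to decide whether it was handed
   a 64-bit or a 32-bit descriptor.  */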
3813
31a5a547
EB
3814/* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3815 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
a1ab4c31 3816
31a5a547
EB
3817tree
3818fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
a1ab4c31 3819{
9771b263 3820 vec<constructor_elt, va_gc> *v = NULL;
31a5a547 3821 tree field;
a1ab4c31 3822
31a5a547
EB
3823 gnu_expr = maybe_unconstrained_array (gnu_expr);
3824 gnu_expr = gnat_protect_expr (gnu_expr);
3825 gnat_mark_addressable (gnu_expr);
3826
3827 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3828 routine in case we have a 32-bit descriptor. */
3829 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3830 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3831 N_Raise_Constraint_Error),
3832 gnu_expr);
3833
3834 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3835 {
3836 tree value
3837 = convert (TREE_TYPE (field),
3838 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3839 gnu_expr));
3840 CONSTRUCTOR_APPEND_ELT (v, field, value);
3841 }
3842
3843 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
3844}
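/* Illustrative note (editorial addition, not part of the original source):
   wrapping the actual in a COMPOUND_EXPR together with a Constraint_Error
   raise gives the placeholder substitution two candidates, which is what the
   32-bit descriptor's POINTER initializer relies on (see
   build_vms_descriptor32) to raise CE when the object's address does not fit
   in 32 bits.  */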
3845
d628c015
DR
3846/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3847 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3848 which the VMS descriptor is passed. */
a1ab4c31
AC
3849
3850static tree
d628c015
DR
3851convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3852{
3853 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3854 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3855 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3856 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
d628c015 3857 /* The POINTER field is the 6th field in the descriptor. */
910ad8de 3858 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
d628c015
DR
3859
3860 /* Retrieve the value of the POINTER field. */
3861 tree gnu_expr64
58c8f770 3862 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
d628c015
DR
3863
3864 if (POINTER_TYPE_P (gnu_type))
3865 return convert (gnu_type, gnu_expr64);
3866
315cff15 3867 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
d628c015
DR
3868 {
3869 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3870 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
d628c015
DR
3871 tree template_type = TREE_TYPE (p_bounds_type);
3872 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3873 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3874 tree template_tree, template_addr, aflags, dimct, t, u;
d628c015 3875 /* See the head comment of build_vms_descriptor. */
c6bd4220 3876 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
d628c015 3877 tree lfield, ufield;
9771b263 3878 vec<constructor_elt, va_gc> *v;
d628c015 3879
86060344 3880 /* Convert POINTER to the pointer-to-array type. */
d628c015
DR
3881 gnu_expr64 = convert (p_array_type, gnu_expr64);
3882
c6bd4220 3883 switch (iklass)
d628c015
DR
3884 {
3885 case 1: /* Class S */
3886 case 15: /* Class SB */
3887 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
9771b263 3888 vec_alloc (v, 2);
910ad8de 3889 t = DECL_CHAIN (DECL_CHAIN (klass));
d628c015 3890 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
3891 CONSTRUCTOR_APPEND_ELT (v, min_field,
3892 convert (TREE_TYPE (min_field),
3893 integer_one_node));
3894 CONSTRUCTOR_APPEND_ELT (v, max_field,
3895 convert (TREE_TYPE (max_field), t));
3896 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 3897 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
d628c015
DR
3898
3899 /* For class S, we are done. */
c6bd4220 3900 if (iklass == 1)
d628c015
DR
3901 break;
3902
3903 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
3904 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3905 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 3906 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
d628c015
DR
3907 /* If so, there is already a template in the descriptor and
3908 it is located right after the POINTER field. The fields are
3909 64 bits wide so they must be repacked. */
7d76717d 3910 t = DECL_CHAIN (pointer);
d628c015
DR
3911 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3912 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3913
7d76717d 3914 t = DECL_CHAIN (t);
d628c015
DR
3915 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3916 ufield = convert
910ad8de 3917 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3918
3919 /* Build the template in the form of a constructor. */
9771b263 3920 vec_alloc (v, 2);
0e228dd9 3921 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
7d76717d 3922 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3923 ufield);
3924 template_tree = gnat_build_constructor (template_type, v);
d628c015
DR
3925
3926 /* Otherwise use the {1, LENGTH} template we build above. */
3927 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3928 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 3929 template_tree),
d628c015
DR
3930 template_addr);
3931 break;
3932
3933 case 4: /* Class A */
3934 /* The AFLAGS field is the 3rd field after the pointer in the
3935 descriptor. */
910ad8de 3936 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
d628c015
DR
3937 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3938 /* The DIMCT field is the next field in the descriptor after
3939 aflags. */
7d76717d 3940 t = DECL_CHAIN (t);
d628c015
DR
3941 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3942 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3943 or FL_COEFF or FL_BOUNDS not set. */
3944 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
3945 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3946 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3947 dimct,
3948 convert (TREE_TYPE (dimct),
3949 size_one_node)),
1139f2e8 3950 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3951 build2 (BIT_AND_EXPR,
3952 TREE_TYPE (aflags),
3953 aflags, u),
3954 u));
3955 /* There is already a template in the descriptor and it is located
3956 in block 3. The fields are 64 bits wide so they must be repacked. */
910ad8de 3957 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
d628c015
DR
3958 (t)))));
3959 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3960 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3961
7d76717d 3962 t = DECL_CHAIN (t);
d628c015
DR
3963 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3964 ufield = convert
910ad8de 3965 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3966
3967 /* Build the template in the form of a constructor. */
9771b263 3968 vec_alloc (v, 2);
0e228dd9 3969 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
910ad8de 3970 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3971 ufield);
3972 template_tree = gnat_build_constructor (template_type, v);
f76d6e6f 3973 template_tree = build3 (COND_EXPR, template_type, u,
d628c015
DR
3974 build_call_raise (CE_Length_Check_Failed, Empty,
3975 N_Raise_Constraint_Error),
6bf68a93 3976 template_tree);
c6bd4220
EB
3977 template_addr
3978 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
d628c015
DR
3979 break;
3980
3981 case 10: /* Class NCA */
3982 default:
3983 post_error ("unsupported descriptor type for &", gnat_subprog);
3984 template_addr = integer_zero_node;
3985 break;
3986 }
3987
3988 /* Build the fat pointer in the form of a constructor. */
9771b263 3989 vec_alloc (v, 2);
0e228dd9 3990 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
910ad8de 3991 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9
NF
3992 template_addr);
3993 return gnat_build_constructor (gnu_type, v);
d628c015
DR
3994 }
3995
3996 else
3997 gcc_unreachable ();
3998}
3999
4000/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
4001 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
4002 which the VMS descriptor is passed. */
4003
4004static tree
4005convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
4006{
4007 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4008 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4009 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 4010 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
a1ab4c31 4011 /* The POINTER field is the 4th field in the descriptor. */
910ad8de 4012 tree pointer = DECL_CHAIN (klass);
a1ab4c31
AC
4013
4014 /* Retrieve the value of the POINTER field. */
d628c015 4015 tree gnu_expr32
a1ab4c31
AC
4016 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
4017
4018 if (POINTER_TYPE_P (gnu_type))
d628c015 4019 return convert (gnu_type, gnu_expr32);
a1ab4c31 4020
315cff15 4021 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
a1ab4c31
AC
4022 {
4023 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 4024 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
a1ab4c31
AC
4025 tree template_type = TREE_TYPE (p_bounds_type);
4026 tree min_field = TYPE_FIELDS (template_type);
7d76717d 4027 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 4028 tree template_tree, template_addr, aflags, dimct, t, u;
a1ab4c31 4029 /* See the head comment of build_vms_descriptor. */
c6bd4220 4030 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
9771b263 4031 vec<constructor_elt, va_gc> *v;
a1ab4c31 4032
86060344 4033 /* Convert POINTER to the pointer-to-array type. */
d628c015 4034 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31 4035
c6bd4220 4036 switch (iklass)
a1ab4c31
AC
4037 {
4038 case 1: /* Class S */
4039 case 15: /* Class SB */
4040 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
9771b263 4041 vec_alloc (v, 2);
a1ab4c31
AC
4042 t = TYPE_FIELDS (desc_type);
4043 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
4044 CONSTRUCTOR_APPEND_ELT (v, min_field,
4045 convert (TREE_TYPE (min_field),
4046 integer_one_node));
4047 CONSTRUCTOR_APPEND_ELT (v, max_field,
4048 convert (TREE_TYPE (max_field), t));
4049 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 4050 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
a1ab4c31
AC
4051
4052 /* For class S, we are done. */
c6bd4220 4053 if (iklass == 1)
a1ab4c31
AC
4054 break;
4055
4056 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
4057 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4058 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 4059 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
a1ab4c31
AC
4060 /* If so, there is already a template in the descriptor and
4061 it is located right after the POINTER field. */
7d76717d 4062 t = DECL_CHAIN (pointer);
c6bd4220
EB
4063 template_tree
4064 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
a1ab4c31
AC
4065 /* Otherwise use the {1, LENGTH} template we build above. */
4066 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4067 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 4068 template_tree),
a1ab4c31
AC
4069 template_addr);
4070 break;
4071
4072 case 4: /* Class A */
4073 /* The AFLAGS field is the 7th field in the descriptor. */
910ad8de 4074 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
a1ab4c31
AC
4075 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4076 /* The DIMCT field is the 8th field in the descriptor. */
7d76717d 4077 t = DECL_CHAIN (t);
a1ab4c31
AC
4078 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4079 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4080 or FL_COEFF or FL_BOUNDS not set. */
4081 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
4082 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4083 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4084 dimct,
4085 convert (TREE_TYPE (dimct),
4086 size_one_node)),
1139f2e8 4087 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4088 build2 (BIT_AND_EXPR,
4089 TREE_TYPE (aflags),
4090 aflags, u),
4091 u));
a1ab4c31
AC
4092 /* There is already a template in the descriptor and it is
4093 located at the start of block 3 (12th field). */
910ad8de 4094 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
c6bd4220
EB
4095 template_tree
4096 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
f76d6e6f 4097 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
d628c015
DR
4098 build_call_raise (CE_Length_Check_Failed, Empty,
4099 N_Raise_Constraint_Error),
6bf68a93 4100 template_tree);
c6bd4220
EB
4101 template_addr
4102 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
a1ab4c31
AC
4103 break;
4104
4105 case 10: /* Class NCA */
4106 default:
4107 post_error ("unsupported descriptor type for &", gnat_subprog);
4108 template_addr = integer_zero_node;
4109 break;
4110 }
4111
4112 /* Build the fat pointer in the form of a constructor. */
9771b263 4113 vec_alloc (v, 2);
0e228dd9 4114 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
910ad8de 4115 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9 4116 template_addr);
d628c015 4117
0e228dd9 4118 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
4119 }
4120
4121 else
4122 gcc_unreachable ();
4123}
4124
a981c964
EB
4125/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4126 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
491f54a7
EB
4127 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4128 descriptor is passed. */
d628c015 4129
f3d34576 4130tree
a981c964 4131convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
491f54a7 4132 Entity_Id gnat_subprog)
d628c015
DR
4133{
4134 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4135 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4136 tree mbo = TYPE_FIELDS (desc_type);
4137 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
910ad8de 4138 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
491f54a7 4139 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 4140
a981c964
EB
4141 /* If the field name is not MBO, this is a 32-bit descriptor and there is
4142 no alternate. Otherwise the primary is 64-bit and the alternate is 32-bit. */
d628c015 4143 if (strcmp (mbostr, "MBO") != 0)
0c700259 4144 {
491f54a7 4145 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
0c700259
EB
4146 return ret;
4147 }
d628c015 4148
a981c964 4149 /* Build the test for 64-bit descriptor. */
d628c015
DR
4150 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4151 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964 4152 is64bit
1139f2e8
EB
4153 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4154 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4155 convert (integer_type_node, mbo),
4156 integer_one_node),
1139f2e8 4157 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4158 convert (integer_type_node, mbmo),
4159 integer_minus_one_node));
4160
4161 /* Build the 2 possible end results. */
491f54a7 4162 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4163 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
491f54a7 4164 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4165 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015 4166}
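/* Illustrative note (editorial addition, not part of the original source):
   the dispatch above is a pure run-time test; the generated code checks
   MBO == 1 and MBMO == -1 and then selects either the 64-bit or the 32-bit
   conversion, with the incoming pointer reinterpreted through
   GNU_EXPR_ALT_TYPE for the 32-bit branch.  */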
a1ab4c31 4167\f
928dfa4b
EB
4168/* Build a type to be used to represent an aliased object whose nominal type
4169 is an unconstrained array. This consists of a RECORD_TYPE containing a
4170 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4171 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4172 an arbitrary unconstrained object. Use NAME as the name of the record.
4173 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
4174
4175tree
928dfa4b
EB
4176build_unc_object_type (tree template_type, tree object_type, tree name,
4177 bool debug_info_p)
a1ab4c31
AC
4178{
4179 tree type = make_node (RECORD_TYPE);
da01bfee
EB
4180 tree template_field
4181 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4182 NULL_TREE, NULL_TREE, 0, 1);
4183 tree array_field
4184 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4185 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
4186
4187 TYPE_NAME (type) = name;
4188 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 4189 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
4190 finish_record_type (type, template_field, 0, true);
4191
4192 /* Declare it now since it will never be declared otherwise. This is
4193 necessary to ensure that its subtrees are properly marked. */
74746d49 4194 create_type_decl (name, type, true, debug_info_p, Empty);
a1ab4c31
AC
4195
4196 return type;
4197}
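/* Illustrative note (editorial addition, not part of the original source):
   the resulting record therefore has the shape { BOUNDS : TEMPLATE_TYPE;
   ARRAY : OBJECT_TYPE }, i.e. the object layout designated by a thin
   pointer, with the BOUNDS part filled in by the template machinery
   above.  */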
4198
4199/* Same, taking a thin or fat pointer type instead of a template type. */
4200
4201tree
4202build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 4203 tree name, bool debug_info_p)
a1ab4c31
AC
4204{
4205 tree template_type;
4206
315cff15 4207 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
4208
4209 template_type
315cff15 4210 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 4211 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 4212 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
4213
4214 return
4215 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 4216}
a1ab4c31 4217\f
229077b0
EB
4218/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4219 In the normal case this is just two adjustments, but we have more to
4220 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
4221
4222void
4223update_pointer_to (tree old_type, tree new_type)
4224{
4225 tree ptr = TYPE_POINTER_TO (old_type);
4226 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 4227 tree t;
a1ab4c31
AC
4228
4229 /* If this is the main variant, process all the other variants first. */
4230 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
4231 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4232 update_pointer_to (t, new_type);
a1ab4c31 4233
229077b0 4234 /* If no pointers and no references, we are done. */
a1ab4c31
AC
4235 if (!ptr && !ref)
4236 return;
4237
4238 /* Merge the old type qualifiers in the new type.
4239
4240 Each old variant has qualifiers for specific reasons, and the new
229077b0 4241 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
4242 information grabbed at some point, and merging the two simply unifies
4243 these inputs into the final type description.
4244
4245 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
4246 type designating it; after the designated type's freeze, we get here with
4247 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4248 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
4249 designated type, because that's what it really is.
4250
229077b0
EB
4251 We might also get here for a non-dummy OLD_TYPE variant with different
4252 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 4253 to private record type elaboration (see the comments around the call to
229077b0
EB
4254 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4255 the qualifiers in those cases too, to avoid accidentally discarding the
4256 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4257 new_type
4258 = build_qualified_type (new_type,
4259 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4260
4261 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
4262 if (old_type == new_type)
4263 return;
4264
4265 /* Otherwise, first handle the simple case. */
4266 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4267 {
aeecf17c
EB
4268 tree new_ptr, new_ref;
4269
4270 /* If pointer or reference already points to new type, nothing to do.
4271 This can happen as update_pointer_to can be invoked multiple times
4272 on the same couple of types because of the type variants. */
4273 if ((ptr && TREE_TYPE (ptr) == new_type)
4274 || (ref && TREE_TYPE (ref) == new_type))
4275 return;
4276
4277 /* Chain PTR and its variants at the end. */
4278 new_ptr = TYPE_POINTER_TO (new_type);
4279 if (new_ptr)
4280 {
4281 while (TYPE_NEXT_PTR_TO (new_ptr))
4282 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4283 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4284 }
4285 else
4286 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 4287
aeecf17c 4288 /* Now adjust them. */
a1ab4c31 4289 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 4290 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
4291 {
4292 TREE_TYPE (t) = new_type;
4293 if (TYPE_NULL_BOUNDS (t))
4294 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4295 }
de9528f0 4296
aeecf17c
EB
4297 /* Chain REF and its variants at the end. */
4298 new_ref = TYPE_REFERENCE_TO (new_type);
4299 if (new_ref)
4300 {
4301 while (TYPE_NEXT_REF_TO (new_ref))
4302 new_ref = TYPE_NEXT_REF_TO (new_ref);
4303 TYPE_NEXT_REF_TO (new_ref) = ref;
4304 }
4305 else
4306 TYPE_REFERENCE_TO (new_type) = ref;
4307
4308 /* Now adjust them. */
a1ab4c31 4309 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
4310 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4311 TREE_TYPE (t) = new_type;
de9528f0
EB
4312
4313 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 4314 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4315 }
4316
aeecf17c
EB
4317 /* Now deal with the unconstrained array case. In this case the pointer
4318 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
4319 Turn them into pointers to the correct types using update_pointer_to.
4320 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
4321 else
4322 {
e3edbd56 4323 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
4324
4325 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4326
e3edbd56 4327 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
4328 since update_pointer_to can be invoked multiple times on the same
4329 couple of types because of the type variants. */
4330 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4331 return;
4332
a1ab4c31 4333 update_pointer_to
e3edbd56
EB
4334 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4335 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 4336
a1ab4c31 4337 update_pointer_to
e3edbd56
EB
4338 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4339 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 4340
e3edbd56
EB
4341 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4342 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 4343
e3edbd56 4344 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4345 }
4346}
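/* Illustrative note (editorial addition, not part of the original source):
   in the simple case all pointer and reference nodes, together with their
   variants, are rechained onto NEW_TYPE and retargeted to it; in the
   unconstrained-array case the routine recurses on the two fields of the
   fat-pointer record and on the object record type used by thin pointers,
   since those still point to dummy nodes.  */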
4347\f
8df2e902
EB
4348/* Convert EXPR, a pointer to a constrained array, into a pointer to an
4349 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
4350
4351static tree
4352convert_to_fat_pointer (tree type, tree expr)
4353{
910ad8de 4354 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 4355 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 4356 tree etype = TREE_TYPE (expr);
6bf68a93 4357 tree template_tree;
9771b263
DN
4358 vec<constructor_elt, va_gc> *v;
4359 vec_alloc (v, 2);
a1ab4c31 4360
50179d58
EB
4361 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4362 array (compare_fat_pointers ensures that this is the full discriminant)
4363 and a valid pointer to the bounds. This latter property is necessary
4364 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 4365 if (integer_zerop (expr))
0e228dd9 4366 {
50179d58
EB
4367 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4368 tree null_bounds, t;
4369
4370 if (TYPE_NULL_BOUNDS (ptr_template_type))
4371 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4372 else
4373 {
4374 /* The template type can still be dummy at this point so we build an
4375 empty constructor. The middle-end will fill it in with zeros. */
9771b263
DN
4376 t = build_constructor (template_type,
4377 NULL);
50179d58
EB
4378 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4379 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4380 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4381 }
4382
0e228dd9 4383 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4384 fold_convert (p_array_type, null_pointer_node));
4385 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4386 t = build_constructor (type, v);
4387 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4388 TREE_CONSTANT (t) = 0;
4389 TREE_STATIC (t) = 1;
4390
4391 return t;
0e228dd9 4392 }
a1ab4c31 4393
0d7de0e1
EB
4394 /* If EXPR is a thin pointer, make template and data from the record. */
4395 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4396 {
0d7de0e1 4397 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4398
7d7a1fe8 4399 expr = gnat_protect_expr (expr);
a1ab4c31
AC
4400 if (TREE_CODE (expr) == ADDR_EXPR)
4401 expr = TREE_OPERAND (expr, 0);
4402 else
2b45154d
EB
4403 {
4404 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4405 the thin pointer value has been shifted so we first need to shift
4406 it back to get the template address. */
4407 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4408 expr
4409 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4410 fold_build1 (NEGATE_EXPR, sizetype,
4411 byte_position
4412 (DECL_CHAIN (field))));
4413 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
4414 }
a1ab4c31 4415
0d7de0e1 4416 template_tree = build_component_ref (expr, NULL_TREE, field, false);
a1ab4c31
AC
4417 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4418 build_component_ref (expr, NULL_TREE,
0d7de0e1 4419 DECL_CHAIN (field), false));
a1ab4c31 4420 }
8df2e902
EB
4421
4422 /* Otherwise, build the constructor for the template. */
a1ab4c31 4423 else
6bf68a93 4424 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
a1ab4c31 4425
8df2e902 4426 /* The final result is a constructor for the fat pointer.
a1ab4c31 4427
8df2e902
EB
4428 If EXPR is an argument of a foreign convention subprogram, the type it
4429 points to is directly the component type. In this case, the expression
a1ab4c31 4430 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4431 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4432 required, for instance because it ensures that possible later folding of
8df2e902 4433 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4434 same type as the initial reference.
4435
8df2e902
EB
4436 Note that the call to "build_template" above is still fine because it
4437 will only refer to the provided TEMPLATE_TYPE in this case. */
0e228dd9
NF
4438 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4439 convert (p_array_type, expr));
910ad8de 4440 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4441 build_unary_op (ADDR_EXPR, NULL_TREE,
4442 template_tree));
4443 return gnat_build_constructor (type, v);
a1ab4c31
AC
4444}
4445\f
a1ab4c31
AC
4446/* Create an expression whose value is that of EXPR,
4447 converted to type TYPE. The TREE_TYPE of the value
4448 is always TYPE. This function implements all reasonable
4449 conversions; callers should filter out those that are
4450 not permitted by the language being compiled. */
4451
4452tree
4453convert (tree type, tree expr)
4454{
a1ab4c31
AC
4455 tree etype = TREE_TYPE (expr);
4456 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4457 enum tree_code code = TREE_CODE (type);
a1ab4c31 4458
c34f3839
EB
4459 /* If the expression is already of the right type, we are done. */
4460 if (etype == type)
a1ab4c31
AC
4461 return expr;
4462
4463 /* If both input and output have padding and are of variable size, do this
4464 as an unchecked conversion. Likewise if one is a mere variant of the
4465 other, so we avoid a pointless unpad/repad sequence. */
4466 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4467 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4468 && (!TREE_CONSTANT (TYPE_SIZE (type))
4469 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4470 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4471 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4472 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4473 ;
4474
f88facfe
EB
4475 /* If the output type has padding, convert to the inner type and make a
4476 constructor to build the record, unless a variable size is involved. */
315cff15 4477 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4478 {
9771b263 4479 vec<constructor_elt, va_gc> *v;
0e228dd9 4480
a1ab4c31
AC
4481 /* If we previously converted from another type and our type is
4482 of variable size, remove the conversion to avoid the need for
f88facfe 4483 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4484 original and packable version. */
4485 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4486 && (!TREE_CONSTANT (TYPE_SIZE (type))
4487 || (ecode == RECORD_TYPE
4488 && TYPE_NAME (etype)
4489 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4490 expr = TREE_OPERAND (expr, 0);
4491
4492 /* If we are just removing the padding from expr, convert the original
4493 object if we have variable size in order to avoid the need for some
f88facfe 4494 variable-sized temporaries. Likewise if the padding type is a mere variant
a1ab4c31
AC
4495 of the type being unpadded, so we avoid a pointless unpad/repad sequence. */
4496 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4497 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4498 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4499 || TYPE_MAIN_VARIANT (type)
4500 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4501 || (ecode == RECORD_TYPE
4502 && TYPE_NAME (etype)
4503 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4504 return convert (type, TREE_OPERAND (expr, 0));
4505
431cfac1
EB
4506 /* If the inner type is of self-referential size and the expression type
4507 is a record, do this as an unchecked conversion. But first pad the
4508 expression if possible to have the same size on both sides. */
c34f3839 4509 if (ecode == RECORD_TYPE
f88facfe 4510 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4511 {
980a0501 4512 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4513 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4514 false, false, false, true),
4515 expr);
431cfac1
EB
4516 return unchecked_convert (type, expr, false);
4517 }
a1ab4c31 4518
f88facfe
EB
4519 /* If we are converting between array types with variable size, do the
4520 final conversion as an unchecked conversion, again to avoid the need
4521 for some variable-sized temporaries. If valid, this conversion is
4522 very likely purely technical and without real effects. */
c34f3839 4523 if (ecode == ARRAY_TYPE
f88facfe
EB
4524 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4525 && !TREE_CONSTANT (TYPE_SIZE (etype))
4526 && !TREE_CONSTANT (TYPE_SIZE (type)))
4527 return unchecked_convert (type,
4528 convert (TREE_TYPE (TYPE_FIELDS (type)),
4529 expr),
4530 false);
4531
9771b263 4532 vec_alloc (v, 1);
0e228dd9
NF
4533 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4534 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4535 return gnat_build_constructor (type, v);
a1ab4c31
AC
4536 }
4537
4538 /* If the input type has padding, remove it and convert to the output type.
4539 The ordering of the conditions is arranged to ensure that the output type is not
4540 a padding type here, as it is not clear whether the conversion would
4541 always be correct if this were to happen. */
315cff15 4542 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4543 {
4544 tree unpadded;
4545
4546 /* If we have just converted to this padded type, just get the
4547 inner expression. */
4548 if (TREE_CODE (expr) == CONSTRUCTOR
9771b263
DN
4549 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4550 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4551 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
a1ab4c31
AC
4552
4553 /* Otherwise, build an explicit component reference. */
4554 else
4555 unpadded
4556 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4557
4558 return convert (type, unpadded);
4559 }
4560
4561 /* If the input is a biased type, adjust first. */
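  /* As a purely illustrative example: for a subtype with range 100 .. 103
     held in 2 bits using a biased representation, the stored value 3 stands
     for the logical value 103, so the PLUS_EXPR built below adds back the
     bias, i.e. TYPE_MIN_VALUE (100), before the regular conversion.  */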
4562 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4563 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4564 fold_convert (TREE_TYPE (etype),
4565 expr),
4566 TYPE_MIN_VALUE (etype)));
4567
4568 /* If the input is a justified modular type, we need to extract the actual
4569 object before converting it to any other type with the exceptions of an
4570 unconstrained array or of a mere type variant. It is useful to avoid the
4571 extraction and conversion in the type variant case because it could end
4572 up replacing a VAR_DECL expr by a constructor and we might be about to
4573 take the address of the result. */
4574 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4575 && code != UNCONSTRAINED_ARRAY_TYPE
4576 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4577 return convert (type, build_component_ref (expr, NULL_TREE,
4578 TYPE_FIELDS (etype), false));
4579
4580 /* If converting to a type that contains a template, convert to the data
4581 type and then build the template. */
4582 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4583 {
910ad8de 4584 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4585 vec<constructor_elt, va_gc> *v;
4586 vec_alloc (v, 2);
a1ab4c31
AC
4587
4588 /* If the source already has a template, get a reference to the
4589 associated array only, as we are going to rebuild a template
4590 for the target type anyway. */
4591 expr = maybe_unconstrained_array (expr);
4592
0e228dd9
NF
4593 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4594 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4595 obj_type, NULL_TREE));
910ad8de 4596 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4597 convert (obj_type, expr));
4598 return gnat_build_constructor (type, v);
a1ab4c31
AC
4599 }
4600
a1c7d797 4601 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4602 switch (TREE_CODE (expr))
4603 {
4604 case ERROR_MARK:
4605 return expr;
4606
4607 case NULL_EXPR:
4608 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4609 conversion in gnat_expand_expr. NULL_EXPR does not represent
4610 an actual value, so no conversion is needed. */
4611 expr = copy_node (expr);
4612 TREE_TYPE (expr) = type;
4613 return expr;
4614
4615 case STRING_CST:
4616 /* If we are converting a STRING_CST to another constrained array type,
4617 just make a new one in the proper type. */
4618 if (code == ecode && AGGREGATE_TYPE_P (etype)
4619 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4620 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4621 {
4622 expr = copy_node (expr);
4623 TREE_TYPE (expr) = type;
4624 return expr;
4625 }
4626 break;
4627
7948ae37
OH
4628 case VECTOR_CST:
4629 /* If we are converting a VECTOR_CST to a mere variant type, just make
4630 a new one in the proper type. */
4631 if (code == ecode && gnat_types_compatible_p (type, etype))
4632 {
4633 expr = copy_node (expr);
4634 TREE_TYPE (expr) = type;
4635 return expr;
4636 }
4637
a1ab4c31
AC
4638 case CONSTRUCTOR:
4639 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4640 a new one in the proper type. */
4641 if (code == ecode && gnat_types_compatible_p (type, etype))
4642 {
4643 expr = copy_node (expr);
4644 TREE_TYPE (expr) = type;
9771b263 4645 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4646 return expr;
4647 }
4648
cb3d597d
EB
4649 /* Likewise for a conversion between original and packable version, or
4650 conversion between types of the same size and with the same list of
4651 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4652 if (code == ecode
4653 && code == RECORD_TYPE
cb3d597d
EB
4654 && (TYPE_NAME (type) == TYPE_NAME (etype)
4655 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4656
a1ab4c31 4657 {
9771b263
DN
4658 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4659 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4660 vec<constructor_elt, va_gc> *v;
4661 vec_alloc (v, len);
a1ab4c31
AC
4662 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4663 unsigned HOST_WIDE_INT idx;
4664 tree index, value;
4665
db868e1e
OH
4666 /* Whether we need to clear TREE_CONSTANT et al. on the output
4667 constructor when we convert in place. */
4668 bool clear_constant = false;
4669
a1ab4c31
AC
4670 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4671 {
cb3d597d
EB
4672 /* We expect only simple constructors. */
4673 if (!SAME_FIELD_P (index, efield))
4674 break;
4675 /* The field must be the same. */
4676 if (!SAME_FIELD_P (efield, field))
a1ab4c31 4677 break;
f32682ca 4678 constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
9771b263 4679 v->quick_push (elt);
db868e1e
OH
4680
4681 /* If packing has made this field a bitfield and the input
4682 value couldn't be emitted statically any more, we need to
4683 clear TREE_CONSTANT on our output. */
ced57283
EB
4684 if (!clear_constant
4685 && TREE_CONSTANT (expr)
db868e1e
OH
4686 && !CONSTRUCTOR_BITFIELD_P (efield)
4687 && CONSTRUCTOR_BITFIELD_P (field)
4688 && !initializer_constant_valid_for_bitfield_p (value))
4689 clear_constant = true;
4690
910ad8de
NF
4691 efield = DECL_CHAIN (efield);
4692 field = DECL_CHAIN (field);
a1ab4c31
AC
4693 }
4694
db868e1e
OH
4695 /* If we have been able to match and convert all the input fields
4696 to their output type, convert in place now. We'll fall back to a
4697 view conversion downstream otherwise. */
a1ab4c31
AC
4698 if (idx == len)
4699 {
4700 expr = copy_node (expr);
4701 TREE_TYPE (expr) = type;
4702 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4703 if (clear_constant)
ced57283 4704 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4705 return expr;
4706 }
4707 }
7948ae37
OH
4708
4709 /* Likewise for a conversion between array type and vector type with a
4710 compatible representative array. */
4711 else if (code == VECTOR_TYPE
4712 && ecode == ARRAY_TYPE
4713 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4714 etype))
4715 {
9771b263
DN
4716 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4717 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4718 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4719 unsigned HOST_WIDE_INT ix;
4720 tree value;
4721
4722 /* Build a VECTOR_CST from a *constant* array constructor. */
4723 if (TREE_CONSTANT (expr))
4724 {
4725 bool constant_p = true;
4726
4727 /* Iterate through elements and check if all constructor
4728 elements are *_CSTs. */
4729 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4730 if (!CONSTANT_CLASS_P (value))
4731 {
4732 constant_p = false;
4733 break;
4734 }
4735
4736 if (constant_p)
4737 return build_vector_from_ctor (type,
4738 CONSTRUCTOR_ELTS (expr));
4739 }
4740
4741 /* Otherwise, build a regular vector constructor. */
9771b263 4742 vec_alloc (v, len);
7948ae37
OH
4743 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4744 {
f32682ca 4745 constructor_elt elt = {NULL_TREE, value};
9771b263 4746 v->quick_push (elt);
7948ae37
OH
4747 }
4748 expr = copy_node (expr);
4749 TREE_TYPE (expr) = type;
4750 CONSTRUCTOR_ELTS (expr) = v;
4751 return expr;
4752 }
a1ab4c31
AC
4753 break;
4754
4755 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4756 /* First retrieve the underlying array. */
4757 expr = maybe_unconstrained_array (expr);
4758 etype = TREE_TYPE (expr);
4759 ecode = TREE_CODE (etype);
4760 break;
a1ab4c31
AC
4761
4762 case VIEW_CONVERT_EXPR:
4763 {
4764 /* GCC 4.x is very sensitive to type consistency overall, and view
4765 conversions thus are very frequent. Even though just "convert"ing
4766 the inner operand to the output type is fine in most cases, it
4767 might expose unexpected input/output type mismatches in special
4768 circumstances so we avoid such recursive calls when we can. */
4769 tree op0 = TREE_OPERAND (expr, 0);
4770
4771 /* If we are converting back to the original type, we can just
4772 lift the input conversion. This is a common occurrence with
4773 switches back-and-forth amongst type variants. */
4774 if (type == TREE_TYPE (op0))
4775 return op0;
4776
7948ae37
OH
4777 /* Otherwise, if we're converting between two aggregate or vector
4778 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4779 target type in place or to just convert the inner expression. */
4780 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4781 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4782 {
4783 /* If we are converting between mere variants, we can just
4784 substitute the VIEW_CONVERT_EXPR in place. */
4785 if (gnat_types_compatible_p (type, etype))
4786 return build1 (VIEW_CONVERT_EXPR, type, op0);
4787
4788 /* Otherwise, we may just bypass the input view conversion unless
4789 one of the types is a fat pointer, which is handled by
4790 specialized code below which relies on exact type matching. */
315cff15
EB
4791 else if (!TYPE_IS_FAT_POINTER_P (type)
4792 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4793 return convert (type, op0);
4794 }
ad1d36ba
EB
4795
4796 break;
a1ab4c31 4797 }
a1ab4c31 4798
a1ab4c31
AC
4799 default:
4800 break;
4801 }
4802
4803 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4804 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4805 return convert_to_fat_pointer (type, expr);
4806
7948ae37
OH
4807 /* If we are converting between two aggregate or vector types that are mere
4808 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4809 to a vector type from its representative array type. */
4810 else if ((code == ecode
4811 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4812 && gnat_types_compatible_p (type, etype))
4813 || (code == VECTOR_TYPE
4814 && ecode == ARRAY_TYPE
4815 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4816 etype)))
a1ab4c31
AC
4817 return build1 (VIEW_CONVERT_EXPR, type, expr);
4818
76af763d
EB
4819 /* If we are converting between tagged types, try to upcast properly. */
4820 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4821 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4822 {
4823 tree child_etype = etype;
4824 do {
4825 tree field = TYPE_FIELDS (child_etype);
4826 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4827 return build_component_ref (expr, NULL_TREE, field, false);
4828 child_etype = TREE_TYPE (field);
4829 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4830 }
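  /* Illustrative summary: a conversion from a derived tagged type to one of
     its ancestors thus walks down the chain of components holding the parent
     part (those whose DECL_NAME is parent_name_id) and returns a reference
     to the first one whose type is exactly the target type; if none is
     found, execution simply continues past this chain of tests.  */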
4831
bb1f7929
EB
4832 /* If we are converting from a smaller form of record type back to it, just
4833 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4834 size on both sides. */
4835 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4836 && smaller_form_type_p (etype, type))
4837 {
4838 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4839 false, false, false, true),
4840 expr);
4841 return build1 (VIEW_CONVERT_EXPR, type, expr);
4842 }
4843
a1ab4c31 4844 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4845 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4846 return fold_convert (type, expr);
4847
4848 switch (code)
4849 {
4850 case VOID_TYPE:
4851 return fold_build1 (CONVERT_EXPR, type, expr);
4852
a1ab4c31
AC
4853 case INTEGER_TYPE:
4854 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4855 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4856 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4857 return unchecked_convert (type, expr, false);
4858 else if (TYPE_BIASED_REPRESENTATION_P (type))
4859 return fold_convert (type,
4860 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4861 convert (TREE_TYPE (type), expr),
4862 TYPE_MIN_VALUE (type)));
4863
4864 /* ... fall through ... */
4865
4866 case ENUMERAL_TYPE:
01ddebf2 4867 case BOOLEAN_TYPE:
a1ab4c31
AC
4868 /* If we are converting an additive expression to an integer type
4869 with lower precision, be wary of the optimization that can be
4870 applied by convert_to_integer. There are 2 problematic cases:
4871 - if the first operand was originally of a biased type,
4872 because we could be recursively called to convert it
4873 to an intermediate type and thus rematerialize the
4874 additive operator endlessly,
4875 - if the expression contains a placeholder, because an
4876 intermediate conversion that changes the sign could
4877 be inserted and thus introduce an artificial overflow
4878 at compile time when the placeholder is substituted. */
4879 if (code == INTEGER_TYPE
4880 && ecode == INTEGER_TYPE
4881 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4882 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4883 {
4884 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4885
4886 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4887 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4888 || CONTAINS_PLACEHOLDER_P (expr))
4889 return build1 (NOP_EXPR, type, expr);
4890 }
4891
4892 return fold (convert_to_integer (type, expr));
4893
4894 case POINTER_TYPE:
4895 case REFERENCE_TYPE:
0d7de0e1 4896 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4897 for differing offsets from the base pointer, depending on whether
4898 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
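      /* Illustrative note: when TYPE_UNCONSTRAINED_ARRAY is set on the record
	 type, the thin pointer is considered to point directly at the array
	 data, i.e. at the byte position of the second field, whereas it points
	 at the start of the record (offset 0) otherwise; the difference of
	 these byte positions is what gets added below to rebase the pointer
	 from one convention to the other.  */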
315cff15 4899 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4900 {
2b45154d
EB
4901 tree etype_pos
4902 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4903 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4904 : size_zero_node;
4905 tree type_pos
4906 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4907 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4908 : size_zero_node;
4909 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4910
a1ab4c31 4911 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4912 if (integer_zerop (byte_diff))
4913 return expr;
4914
4915 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4916 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4917 }
4918
0d7de0e1
EB
4919 /* If converting fat pointer to normal or thin pointer, get the pointer
4920 to the array and then convert it. */
4921 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
4922 expr
4923 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4924
4925 return fold (convert_to_pointer (type, expr));
4926
4927 case REAL_TYPE:
4928 return fold (convert_to_real (type, expr));
4929
4930 case RECORD_TYPE:
4931 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4932 {
9771b263
DN
4933 vec<constructor_elt, va_gc> *v;
4934 vec_alloc (v, 1);
0e228dd9
NF
4935
4936 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4937 convert (TREE_TYPE (TYPE_FIELDS (type)),
4938 expr));
4939 return gnat_build_constructor (type, v);
4940 }
a1ab4c31
AC
4941
4942 /* ... fall through ... */
4943
4944 case ARRAY_TYPE:
4945 /* In these cases, assume the front-end has validated the conversion.
4946 If the conversion is valid, it will be a bit-wise conversion, so
4947 it can be viewed as an unchecked conversion. */
4948 return unchecked_convert (type, expr, false);
4949
4950 case UNION_TYPE:
4951 /* This is either a conversion between a tagged type and some
4952 subtype, which we have to mark as a UNION_TYPE because of
4953 overlapping fields, or a conversion of an Unchecked_Union. */
4954 return unchecked_convert (type, expr, false);
4955
4956 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4957 /* If the input is a VECTOR_TYPE, convert to the representative
4958 array type first. */
4959 if (ecode == VECTOR_TYPE)
4960 {
4961 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4962 etype = TREE_TYPE (expr);
4963 ecode = TREE_CODE (etype);
4964 }
4965
a1ab4c31
AC
4966 /* If EXPR is a constrained array, take its address, convert it to a
4967 fat pointer, and then dereference it. Likewise if EXPR is a
4968 record containing both a template and a constrained array.
4969 Note that a record representing a justified modular type
4970 always represents a packed constrained array. */
4971 if (ecode == ARRAY_TYPE
4972 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4973 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4974 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4975 return
4976 build_unary_op
4977 (INDIRECT_REF, NULL_TREE,
4978 convert_to_fat_pointer (TREE_TYPE (type),
4979 build_unary_op (ADDR_EXPR,
4980 NULL_TREE, expr)));
4981
4982 /* Do something very similar for converting one unconstrained
4983 array to another. */
4984 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4985 return
4986 build_unary_op (INDIRECT_REF, NULL_TREE,
4987 convert (TREE_TYPE (type),
4988 build_unary_op (ADDR_EXPR,
4989 NULL_TREE, expr)));
4990 else
4991 gcc_unreachable ();
4992
4993 case COMPLEX_TYPE:
4994 return fold (convert_to_complex (type, expr));
4995
4996 default:
4997 gcc_unreachable ();
4998 }
4999}
15bf7d19
EB
5000
5001/* Create an expression whose value is that of EXPR converted to the common
5002 index type, which is sizetype. EXPR is supposed to be in the base type
5003 of the GNAT index type. Calling it is equivalent to doing
5004
5005 convert (sizetype, expr)
5006
5007 but we try to distribute the type conversion with the knowledge that EXPR
5008 cannot overflow in its type. This is a best-effort approach and we fall
5009 back to the above expression as soon as difficulties are encountered.
5010
5011 This is necessary to overcome issues that arise when the GNAT base index
5012 type and the GCC common index type (sizetype) don't have the same size,
5013 which is quite frequent on 64-bit architectures. In this case, and if
5014 the GNAT base index type is signed but the iteration type of the loop has
5015 been forced to unsigned, the loop scalar evolution engine cannot compute
5016 a simple evolution for the general induction variables associated with the
5017 array indices, because it will preserve the wrap-around semantics in the
5018 unsigned type of their "inner" part. As a result, many loop optimizations
5019 are blocked.
5020
5021 The solution is to use a special (basic) induction variable that is at
5022 least as large as sizetype, and to express the aforementioned general
5023 induction variables in terms of this induction variable, eliminating
5024 the problematic intermediate truncation to the GNAT base index type.
5025 This is possible as long as the original expression doesn't overflow
5026 and if the middle-end hasn't introduced artificial overflows in the
5027 course of the various simplifications it can make to the expression. */
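/* Purely illustrative example: for an Ada loop such as

     for J in A'Range loop
       A (J) := 0;
     end loop;

   compiled on a 64-bit target where the base index type is 32-bit, the
   address computation for A (J) would otherwise involve truncating the wide
   iteration count back to the 32-bit index type; rewriting the index
   expression in terms of the sizetype-wide induction variable attached to J
   (see DECL_INDUCTION_VAR below) removes that truncation and lets the scalar
   evolution engine compute a simple evolution.  */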
5028
5029tree
5030convert_to_index_type (tree expr)
5031{
5032 enum tree_code code = TREE_CODE (expr);
5033 tree type = TREE_TYPE (expr);
5034
5035 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5036 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5037 if (TYPE_UNSIGNED (type) || !optimize)
5038 return convert (sizetype, expr);
5039
5040 switch (code)
5041 {
5042 case VAR_DECL:
5043 /* The main effect of the function: replace a loop parameter with its
5044 associated special induction variable. */
5045 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5046 expr = DECL_INDUCTION_VAR (expr);
5047 break;
5048
5049 CASE_CONVERT:
5050 {
5051 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5052 /* Bail out as soon as we suspect some sort of type frobbing. */
5053 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5054 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5055 break;
5056 }
5057
5058 /* ... fall through ... */
5059
5060 case NON_LVALUE_EXPR:
5061 return fold_build1 (code, sizetype,
5062 convert_to_index_type (TREE_OPERAND (expr, 0)));
5063
5064 case PLUS_EXPR:
5065 case MINUS_EXPR:
5066 case MULT_EXPR:
5067 return fold_build2 (code, sizetype,
5068 convert_to_index_type (TREE_OPERAND (expr, 0)),
5069 convert_to_index_type (TREE_OPERAND (expr, 1)));
5070
5071 case COMPOUND_EXPR:
5072 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5073 convert_to_index_type (TREE_OPERAND (expr, 1)));
5074
5075 case COND_EXPR:
5076 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5077 convert_to_index_type (TREE_OPERAND (expr, 1)),
5078 convert_to_index_type (TREE_OPERAND (expr, 2)));
5079
5080 default:
5081 break;
5082 }
5083
5084 return convert (sizetype, expr);
5085}
a1ab4c31
AC
5086\f
5087/* Remove all conversions that are done in EXP. This includes converting
5088 from a padded type or to a justified modular type. If TRUE_ADDRESS
5089 is true, always return the address of the containing object even if
5090 the address is not bit-aligned. */
5091
5092tree
5093remove_conversions (tree exp, bool true_address)
5094{
5095 switch (TREE_CODE (exp))
5096 {
5097 case CONSTRUCTOR:
5098 if (true_address
5099 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5100 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5101 return
9771b263 5102 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
a1ab4c31
AC
5103 break;
5104
5105 case COMPONENT_REF:
315cff15 5106 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
5107 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5108 break;
5109
a1ab4c31 5110 CASE_CONVERT:
722356ce
EB
5111 case VIEW_CONVERT_EXPR:
5112 case NON_LVALUE_EXPR:
a1ab4c31
AC
5113 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5114
5115 default:
5116 break;
5117 }
5118
5119 return exp;
5120}
5121\f
5122/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 5123 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
5124 likewise return an expression pointing to the underlying array. */
5125
5126tree
5127maybe_unconstrained_array (tree exp)
5128{
5129 enum tree_code code = TREE_CODE (exp);
1aa291f7 5130 tree type = TREE_TYPE (exp);
a1ab4c31 5131
1aa291f7 5132 switch (TREE_CODE (type))
a1ab4c31
AC
5133 {
5134 case UNCONSTRAINED_ARRAY_TYPE:
5135 if (code == UNCONSTRAINED_ARRAY_REF)
5136 {
7e169899 5137 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
5138 const bool no_trap = TREE_THIS_NOTRAP (exp);
5139
7e169899 5140 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
5141 type = TREE_TYPE (exp);
5142
7e169899
EB
5143 if (TREE_CODE (exp) == COND_EXPR)
5144 {
5145 tree op1
5146 = build_unary_op (INDIRECT_REF, NULL_TREE,
5147 build_component_ref (TREE_OPERAND (exp, 1),
5148 NULL_TREE,
1aa291f7 5149 TYPE_FIELDS (type),
7e169899
EB
5150 false));
5151 tree op2
5152 = build_unary_op (INDIRECT_REF, NULL_TREE,
5153 build_component_ref (TREE_OPERAND (exp, 2),
5154 NULL_TREE,
1aa291f7 5155 TYPE_FIELDS (type),
7e169899
EB
5156 false));
5157
5158 exp = build3 (COND_EXPR,
1aa291f7 5159 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
5160 TREE_OPERAND (exp, 0), op1, op2);
5161 }
5162 else
a1c7d797
EB
5163 {
5164 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5165 build_component_ref (exp, NULL_TREE,
1aa291f7 5166 TYPE_FIELDS (type),
a1c7d797
EB
5167 false));
5168 TREE_READONLY (exp) = read_only;
5169 TREE_THIS_NOTRAP (exp) = no_trap;
5170 }
a1ab4c31
AC
5171 }
5172
5173 else if (code == NULL_EXPR)
1aa291f7
EB
5174 exp = build1 (NULL_EXPR,
5175 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5176 TREE_OPERAND (exp, 0));
5177 break;
a1ab4c31
AC
5178
5179 case RECORD_TYPE:
1aa291f7
EB
5180 /* If this is a padded type and it contains a template, convert to the
5181 unpadded type first. */
5182 if (TYPE_PADDING_P (type)
5183 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5184 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 5185 {
1aa291f7
EB
5186 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5187 type = TREE_TYPE (exp);
5188 }
5189
5190 if (TYPE_CONTAINS_TEMPLATE_P (type))
5191 {
5192 exp = build_component_ref (exp, NULL_TREE,
5193 DECL_CHAIN (TYPE_FIELDS (type)),
5194 false);
5195 type = TREE_TYPE (exp);
5196
5197 /* If the array type is padded, convert to the unpadded type. */
5198 if (TYPE_IS_PADDING_P (type))
5199 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 5200 }
a1ab4c31
AC
5201 break;
5202
5203 default:
5204 break;
5205 }
5206
5207 return exp;
5208}
5209\f
afcea859 5210/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 5211 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
5212
5213static bool
5214can_fold_for_view_convert_p (tree expr)
5215{
5216 tree t1, t2;
5217
5218 /* The folder will fold NOP_EXPRs between integral types with the same
5219 precision (in the middle-end's sense). We cannot allow it if the
5220 types don't have the same precision in the Ada sense as well. */
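  /* Illustrative example (assumed, not taken from actual sources): a NOP_EXPR
     from an 8-bit integral type with an RM size of 1 to a plain 8-bit type
     has equal precisions for the middle-end but not for Ada, so the test
     below rejects it and the caller will build the VIEW_CONVERT_EXPR without
     folding.  */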
5221 if (TREE_CODE (expr) != NOP_EXPR)
5222 return true;
5223
5224 t1 = TREE_TYPE (expr);
5225 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5226
5227 /* Defer to the folder for non-integral conversions. */
5228 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5229 return true;
5230
5231 /* Only fold conversions that preserve both precisions. */
5232 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5233 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5234 return true;
5235
5236 return false;
5237}
5238
a1ab4c31 5239/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
5240 If NOTRUNC_P is true, truncation operations should be suppressed.
5241
5242 Special care is required with (source or target) integral types whose
5243 precision is not equal to their size, to make sure we fetch or assign
5244 the value bits whose location might depend on the endianness, e.g.
5245
5246 Rmsize : constant := 8;
5247 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5248
5249 type Bit_Array is array (1 .. Rmsize) of Boolean;
5250 pragma Pack (Bit_Array);
5251
5252 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5253
5254 Value : Int := 2#1000_0001#;
5255 Vbits : Bit_Array := To_Bit_Array (Value);
5256
5257 we expect the 8 bits at Vbits'Address to always contain Value, while
5258 their original location depends on the endianness, at Value'Address
84fb43a1 5259 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
5260
5261tree
5262unchecked_convert (tree type, tree expr, bool notrunc_p)
5263{
5264 tree etype = TREE_TYPE (expr);
c34f3839
EB
5265 enum tree_code ecode = TREE_CODE (etype);
5266 enum tree_code code = TREE_CODE (type);
980a0501 5267 int c;
a1ab4c31 5268
c34f3839 5269 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
5270 if (etype == type)
5271 return expr;
5272
5273 /* If both types are integral, just do a normal conversion.
5274 Likewise for a conversion to an unconstrained array. */
5275 if ((((INTEGRAL_TYPE_P (type)
c34f3839 5276 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
0d7de0e1 5277 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 5278 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
a1ab4c31 5279 && ((INTEGRAL_TYPE_P (etype)
c34f3839 5280 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
315cff15 5281 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
5282 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5283 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 5284 {
c34f3839 5285 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
5286 {
5287 tree ntype = copy_type (etype);
a1ab4c31
AC
5288 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5289 TYPE_MAIN_VARIANT (ntype) = ntype;
5290 expr = build1 (NOP_EXPR, ntype, expr);
5291 }
5292
c34f3839 5293 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 5294 {
afcea859 5295 tree rtype = copy_type (type);
a1ab4c31
AC
5296 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5297 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
5298 expr = convert (rtype, expr);
5299 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 5300 }
afcea859
EB
5301 else
5302 expr = convert (type, expr);
a1ab4c31
AC
5303 }
5304
afcea859 5305 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
5306 to its size, first unchecked convert to a record type that contains a
5307 field of the given precision. Then extract the field. */
980a0501
EB
5308 else if (INTEGRAL_TYPE_P (type)
5309 && TYPE_RM_SIZE (type)
a1ab4c31
AC
5310 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5311 GET_MODE_BITSIZE (TYPE_MODE (type))))
5312 {
5313 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
5314 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5315 tree field_type, field;
5316
5317 if (TYPE_UNSIGNED (type))
5318 field_type = make_unsigned_type (prec);
5319 else
5320 field_type = make_signed_type (prec);
5321 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5322
5323 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5324 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5325
5326 TYPE_FIELDS (rec_type) = field;
5327 layout_type (rec_type);
5328
5329 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 5330 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 5331 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5332 }
5333
416de7d5
EB
5334 /* Similarly if we are converting from an integral type whose precision is
5335 not equal to its size, first copy into a field of the given precision
5336 and unchecked convert the record type. */
980a0501
EB
5337 else if (INTEGRAL_TYPE_P (etype)
5338 && TYPE_RM_SIZE (etype)
5339 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5340 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
5341 {
5342 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5343 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5344 vec<constructor_elt, va_gc> *v;
5345 vec_alloc (v, 1);
416de7d5
EB
5346 tree field_type, field;
5347
5348 if (TYPE_UNSIGNED (etype))
5349 field_type = make_unsigned_type (prec);
5350 else
5351 field_type = make_signed_type (prec);
5352 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5353
5354 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5355 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5356
5357 TYPE_FIELDS (rec_type) = field;
5358 layout_type (rec_type);
5359
416de7d5 5360 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5361 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5362 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5363 expr = unchecked_convert (type, expr, notrunc_p);
5364 }
5365
980a0501
EB
5366 /* If we are converting from a scalar type to a type with a different size,
5367 we need to pad to have the same size on both sides.
5368
5369 ??? We cannot do it unconditionally because unchecked conversions are
5370 used liberally by the front-end to implement polymorphism, e.g. in:
5371
5372 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5373 return p___size__4 (p__object!(S191s.all));
5374
5375 so we skip all expressions that are references. */
5376 else if (!REFERENCE_CLASS_P (expr)
5377 && !AGGREGATE_TYPE_P (etype)
5378 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5379 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5380 {
5381 if (c < 0)
5382 {
5383 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5384 false, false, false, true),
5385 expr);
5386 expr = unchecked_convert (type, expr, notrunc_p);
5387 }
5388 else
5389 {
5390 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5391 false, false, false, true);
5392 expr = unchecked_convert (rec_type, expr, notrunc_p);
5393 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5394 false);
5395 }
5396 }
5397
7948ae37
OH
5398 /* We have a special case when we are converting between two unconstrained
5399 array types. In that case, take the address, convert the fat pointer
5400 types, and dereference. */
c34f3839 5401 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5402 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5403 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5404 build_unary_op (ADDR_EXPR, NULL_TREE,
5405 expr)));
7948ae37
OH
5406
5407 /* Another special case is when we are converting to a vector type from its
5408 representative array type; this is a regular conversion. */
c34f3839
EB
5409 else if (code == VECTOR_TYPE
5410 && ecode == ARRAY_TYPE
7948ae37
OH
5411 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5412 etype))
5413 expr = convert (type, expr);
5414
a1ab4c31
AC
5415 else
5416 {
5417 expr = maybe_unconstrained_array (expr);
5418 etype = TREE_TYPE (expr);
c34f3839 5419 ecode = TREE_CODE (etype);
afcea859
EB
5420 if (can_fold_for_view_convert_p (expr))
5421 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5422 else
5423 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5424 }
5425
afcea859
EB
5426 /* If the result is an integral type whose precision is not equal to its
5427 size, sign- or zero-extend the result. We need not do this if the input
5428 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5429 is a biased type or if both the input and output are unsigned. */
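  /* Illustrative example: for a signed type with an RM size of 5 bits in an
     8-bit mode, the code below shifts the value left by 3 bits and then right
     by 3 bits; BASE_TYPE has the signedness of TYPE, so the right shift is
     arithmetic and replicates the sign bit (bit 4) into the 3 uppermost bits,
     whereas for an unsigned type it is logical and clears them.  */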
5430 if (!notrunc_p
5431 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 5432 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5433 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5434 GET_MODE_BITSIZE (TYPE_MODE (type)))
5435 && !(INTEGRAL_TYPE_P (etype)
5436 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5437 && operand_equal_p (TYPE_RM_SIZE (type),
5438 (TYPE_RM_SIZE (etype) != 0
5439 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5440 0))
5441 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5442 {
c34f3839
EB
5443 tree base_type
5444 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5445 tree shift_expr
5446 = convert (base_type,
5447 size_binop (MINUS_EXPR,
5448 bitsize_int
5449 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5450 TYPE_RM_SIZE (type)));
5451 expr
5452 = convert (type,
5453 build_binary_op (RSHIFT_EXPR, base_type,
5454 build_binary_op (LSHIFT_EXPR, base_type,
5455 convert (base_type, expr),
5456 shift_expr),
5457 shift_expr));
5458 }
5459
5460 /* An unchecked conversion should never raise Constraint_Error. The code
5461 below assumes that GCC's conversion routines overflow the same way that
5462 the underlying hardware does. This is probably true. In the rare case
5463 when it is false, we can rely on the fact that such conversions are
5464 erroneous anyway. */
5465 if (TREE_CODE (expr) == INTEGER_CST)
5466 TREE_OVERFLOW (expr) = 0;
5467
5468 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5469 mark the result as no longer constant. */
5470 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5471 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5472 OEP_ONLY_CONST))
5473 TREE_CONSTANT (expr) = 0;
5474
5475 return expr;
5476}
5477\f
feec4372 5478/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5479 the latter being a record type as predicated by Is_Record_Type. */
5480
5481enum tree_code
5482tree_code_for_record_type (Entity_Id gnat_type)
5483{
b1a785fb 5484 Node_Id component_list, component;
a1ab4c31 5485
b1a785fb
EB
5486 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5487 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5488 if (!Is_Unchecked_Union (gnat_type))
5489 return RECORD_TYPE;
5490
b1a785fb
EB
5491 gnat_type = Implementation_Base_Type (gnat_type);
5492 component_list
5493 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5494
a1ab4c31
AC
5495 for (component = First_Non_Pragma (Component_Items (component_list));
5496 Present (component);
5497 component = Next_Non_Pragma (component))
5498 if (Ekind (Defining_Entity (component)) == E_Component)
5499 return RECORD_TYPE;
5500
5501 return UNION_TYPE;
5502}
5503
caa9d12a
EB
5504/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5505 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5506 according to the presence of an alignment clause on the type or, if it
5507 is an array, on the component type. */
5508
5509bool
5510is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5511{
5512 gnat_type = Underlying_Type (gnat_type);
5513
5514 *align_clause = Present (Alignment_Clause (gnat_type));
5515
5516 if (Is_Array_Type (gnat_type))
5517 {
5518 gnat_type = Underlying_Type (Component_Type (gnat_type));
5519 if (Present (Alignment_Clause (gnat_type)))
5520 *align_clause = true;
5521 }
5522
5523 if (!Is_Floating_Point_Type (gnat_type))
5524 return false;
5525
5526 if (UI_To_Int (Esize (gnat_type)) != 64)
5527 return false;
5528
5529 return true;
5530}
5531
5532/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5533 size is greater than or equal to 64 bits, or an array of such a type. Set
5534 ALIGN_CLAUSE according to the presence of an alignment clause on the
5535 type or, if it is an array, on the component type. */
5536
5537bool
5538is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5539{
5540 gnat_type = Underlying_Type (gnat_type);
5541
5542 *align_clause = Present (Alignment_Clause (gnat_type));
5543
5544 if (Is_Array_Type (gnat_type))
5545 {
5546 gnat_type = Underlying_Type (Component_Type (gnat_type));
5547 if (Present (Alignment_Clause (gnat_type)))
5548 *align_clause = true;
5549 }
5550
5551 if (!Is_Scalar_Type (gnat_type))
5552 return false;
5553
5554 if (UI_To_Int (Esize (gnat_type)) < 64)
5555 return false;
5556
5557 return true;
5558}
5559
a1ab4c31
AC
5560/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5561 component of an aggregate type. */
5562
5563bool
5564type_for_nonaliased_component_p (tree gnu_type)
5565{
5566 /* If the type is passed by reference, we may have pointers to the
5567 component so it cannot be made non-aliased. */
5568 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5569 return false;
5570
5571 /* We used to say that any component of aggregate type is aliased
5572 because the front-end may take 'Reference of it. The front-end
5573 has been enhanced in the meantime so as to use a renaming instead
5574 in most cases, but the back-end can probably take the address of
5575 such a component too so we go for the conservative stance.
5576
5577 For instance, we might need the address of any array type, even
5578 if normally passed by copy, to construct a fat pointer if the
5579 component is used as an actual for an unconstrained formal.
5580
5581 Likewise for record types: even if a specific record subtype is
5582 passed by copy, the parent type might be passed by ref (e.g. if
5583 it's of variable size) and we might take the address of a child
5584 component to pass to a parent formal. We have no way to check
5585 for such conditions here. */
5586 if (AGGREGATE_TYPE_P (gnu_type))
5587 return false;
5588
5589 return true;
5590}
5591
bb1f7929
EB
5592/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5593
5594bool
5595smaller_form_type_p (tree type, tree orig_type)
5596{
5597 tree size, osize;
5598
5599 /* We're not interested in variants here. */
5600 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5601 return false;
5602
5603 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5604 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5605 return false;
5606
5607 size = TYPE_SIZE (type);
5608 osize = TYPE_SIZE (orig_type);
5609
5610 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5611 return false;
5612
5613 return tree_int_cst_lt (size, osize) != 0;
5614}
5615
a1ab4c31
AC
5616/* Perform final processing on global variables. */
5617
65444786
EB
5618static GTY (()) tree dummy_global;
5619
a1ab4c31
AC
5620void
5621gnat_write_global_declarations (void)
5622{
10e4d056
EB
5623 unsigned int i;
5624 tree iter;
5625
65444786
EB
5626 /* If we have declared types as used at the global level, insert them in
5627 the global hash table. We use a dummy variable for this purpose. */
9771b263 5628 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
65444786 5629 {
35e8bcf4 5630 struct varpool_node *node;
d3c268ab
EB
5631 char *label;
5632
5633 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5634 dummy_global
d3c268ab
EB
5635 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5636 void_type_node);
65444786
EB
5637 TREE_STATIC (dummy_global) = 1;
5638 TREE_ASM_WRITTEN (dummy_global) = 1;
5d59b5e1 5639 node = varpool_node_for_decl (dummy_global);
67348ccc 5640 node->force_output = 1;
65444786 5641
9771b263 5642 while (!types_used_by_cur_var_decl->is_empty ())
65444786 5643 {
9771b263 5644 tree t = types_used_by_cur_var_decl->pop ();
65444786
EB
5645 types_used_by_var_decl_insert (t, dummy_global);
5646 }
5647 }
5648
10e4d056
EB
5649 /* Output debug information for all global type declarations first. This
5650 ensures that global types whose compilation hasn't been finalized yet,
5651 for example pointers to Taft amendment types, have their compilation
5652 finalized in the right context. */
9771b263 5653 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5654 if (TREE_CODE (iter) == TYPE_DECL)
5655 debug_hooks->global_decl (iter);
5656
65d630d4
JH
5657 /* Proceed to optimize and emit assembly. */
5658 finalize_compilation_unit ();
a1ab4c31 5659
10e4d056
EB
5660 /* After cgraph has had a chance to emit everything that's going to
5661 be emitted, output debug information for the rest of the globals. */
5662 if (!seen_error ())
5663 {
5664 timevar_push (TV_SYMOUT);
9771b263 5665 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5666 if (TREE_CODE (iter) != TYPE_DECL)
5667 debug_hooks->global_decl (iter);
5668 timevar_pop (TV_SYMOUT);
5669 }
a1ab4c31
AC
5670}
5671
5672/* ************************************************************************
5673 * * GCC builtins support *
5674 * ************************************************************************ */
5675
5676/* The general scheme is fairly simple:
5677
5678 For each builtin function/type to be declared, gnat_install_builtins calls
5679 internal facilities which eventually get to gnat_push_decl, which in turn
5680 tracks the builtin function decls thus declared in the 'builtin_decls' global
5681 data structure. When an Intrinsic subprogram declaration is processed, we
5682 search this global data structure to retrieve the associated BUILT_IN DECL
5683 node. */
5684
5685/* Search the chain of currently available builtin declarations for a node
5686 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5687 found, if any, or NULL_TREE otherwise. */
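/* For instance (illustrative only): once gnat_install_builtins has run,
   builtin_decl_for (get_identifier ("__builtin_memcpy")) is expected to
   return the FUNCTION_DECL installed for that builtin, while NULL_TREE is
   returned for a name that does not denote a declared builtin.  */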
5688tree
5689builtin_decl_for (tree name)
5690{
5691 unsigned i;
5692 tree decl;
5693
9771b263 5694 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5695 if (DECL_NAME (decl) == name)
5696 return decl;
5697
5698 return NULL_TREE;
5699}
5700
5701/* The code below eventually exposes gnat_install_builtins, which declares
5702 the builtin types and functions we might need, either internally or as
5703 user accessible facilities.
5704
5705 ??? This is a first implementation shot, still in rough shape. It is
5706 heavily inspired by the "C" family implementation, with chunks copied
5707 verbatim from there.
5708
5709 Two obvious TODO candidates are
5710 o Use a more efficient name/decl mapping scheme
5711 o Devise a middle-end infrastructure to avoid having to copy
5712 pieces between front-ends. */
5713
5714/* ----------------------------------------------------------------------- *
5715 * BUILTIN ELEMENTARY TYPES *
5716 * ----------------------------------------------------------------------- */
5717
5718/* Standard data types to be used in builtin argument declarations. */
5719
5720enum c_tree_index
5721{
5722 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5723 CTI_STRING_TYPE,
5724 CTI_CONST_STRING_TYPE,
5725
5726 CTI_MAX
5727};
5728
5729static tree c_global_trees[CTI_MAX];
5730
5731#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5732#define string_type_node c_global_trees[CTI_STRING_TYPE]
5733#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5734
5735/* ??? In addition to some attribute handlers, we currently don't support a
5736 (small) number of builtin types, which in turn inhibits support for a
5737 number of builtin functions. */
5738#define wint_type_node void_type_node
5739#define intmax_type_node void_type_node
5740#define uintmax_type_node void_type_node
5741
5742/* Build the void_list_node (void_type_node having been created). */
5743
5744static tree
5745build_void_list_node (void)
5746{
5747 tree t = build_tree_list (NULL_TREE, void_type_node);
5748 return t;
5749}
5750
5751/* Used to help initialize the builtin-types.def table. When a type of
5752 the correct size doesn't exist, use error_mark_node instead of NULL.
5753 The latter results in segfaults even when a decl using the type doesn't
5754 get invoked. */
5755
5756static tree
5757builtin_type_for_size (int size, bool unsignedp)
5758{
ced57283 5759 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5760 return type ? type : error_mark_node;
5761}
5762
5763/* Build/push the elementary type decls that builtin functions/types
5764 will need. */
5765
5766static void
5767install_builtin_elementary_types (void)
5768{
728936bb 5769 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5770 pid_type_node = integer_type_node;
5771 void_list_node = build_void_list_node ();
5772
5773 string_type_node = build_pointer_type (char_type_node);
5774 const_string_type_node
5775 = build_pointer_type (build_qualified_type
5776 (char_type_node, TYPE_QUAL_CONST));
5777}
5778
5779/* ----------------------------------------------------------------------- *
5780 * BUILTIN FUNCTION TYPES *
5781 * ----------------------------------------------------------------------- */
5782
5783/* Now, builtin function types per se. */
5784
5785enum c_builtin_type
5786{
5787#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5788#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5789#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5790#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5791#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5792#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5793#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5794#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5795#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
acf0174b 5796#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
a1ab4c31
AC
5797#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5798#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5799#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5800#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5801#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5802#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5803 NAME,
5804#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5805#include "builtin-types.def"
5806#undef DEF_PRIMITIVE_TYPE
5807#undef DEF_FUNCTION_TYPE_0
5808#undef DEF_FUNCTION_TYPE_1
5809#undef DEF_FUNCTION_TYPE_2
5810#undef DEF_FUNCTION_TYPE_3
5811#undef DEF_FUNCTION_TYPE_4
5812#undef DEF_FUNCTION_TYPE_5
5813#undef DEF_FUNCTION_TYPE_6
5814#undef DEF_FUNCTION_TYPE_7
acf0174b 5815#undef DEF_FUNCTION_TYPE_8
a1ab4c31
AC
5816#undef DEF_FUNCTION_TYPE_VAR_0
5817#undef DEF_FUNCTION_TYPE_VAR_1
5818#undef DEF_FUNCTION_TYPE_VAR_2
5819#undef DEF_FUNCTION_TYPE_VAR_3
5820#undef DEF_FUNCTION_TYPE_VAR_4
5821#undef DEF_FUNCTION_TYPE_VAR_5
5822#undef DEF_POINTER_TYPE
5823 BT_LAST
5824};
5825
5826typedef enum c_builtin_type builtin_type;
5827
5828/* A temporary array used in communication with def_fn_type. */
5829static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5830
5831/* A helper function for install_builtin_function_types. Build function type
5832 for DEF with return type RET and N arguments. If VAR is true, then the
5833 function should be variadic after those N arguments.
5834
5835 Takes special care not to ICE if any of the types involved are
5836 error_mark_node, which indicates that said type is not in fact available
5837 (see builtin_type_for_size). In which case the function type as a whole
5838 should be error_mark_node. */
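/* For instance (illustrative; the exact enumerators come from
   builtin-types.def): an entry such as

     DEF_FUNCTION_TYPE_2 (BT_FN_PTR_PTR_SIZE, BT_PTR, BT_PTR, BT_SIZE)

   is turned by the macros defined in install_builtin_function_types below
   into the call

     def_fn_type (BT_FN_PTR_PTR_SIZE, BT_PTR, 0, 2, BT_PTR, BT_SIZE);

   which records the corresponding FUNCTION_TYPE in builtin_types.  */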
5839
5840static void
5841def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5842{
e5b00edf
NF
5843 tree t;
5844 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5845 va_list list;
5846 int i;
5847
5848 va_start (list, n);
5849 for (i = 0; i < n; ++i)
5850 {
c6bd4220 5851 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5852 t = builtin_types[a];
5853 if (t == error_mark_node)
5854 goto egress;
e5b00edf 5855 args[i] = t;
a1ab4c31 5856 }
a1ab4c31 5857
a1ab4c31
AC
5858 t = builtin_types[ret];
5859 if (t == error_mark_node)
5860 goto egress;
e5b00edf
NF
5861 if (var)
5862 t = build_varargs_function_type_array (t, n, args);
5863 else
5864 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5865
5866 egress:
5867 builtin_types[def] = t;
0edf1bb2 5868 va_end (list);
a1ab4c31
AC
5869}
5870
5871/* Build the builtin function types and install them in the builtin_types
5872 array for later use in builtin function decls. */
5873
5874static void
5875install_builtin_function_types (void)
5876{
5877 tree va_list_ref_type_node;
5878 tree va_list_arg_type_node;
5879
5880 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5881 {
5882 va_list_arg_type_node = va_list_ref_type_node =
5883 build_pointer_type (TREE_TYPE (va_list_type_node));
5884 }
5885 else
5886 {
5887 va_list_arg_type_node = va_list_type_node;
5888 va_list_ref_type_node = build_reference_type (va_list_type_node);
5889 }
5890
5891#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5892 builtin_types[ENUM] = VALUE;
5893#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5894 def_fn_type (ENUM, RETURN, 0, 0);
5895#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5896 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5897#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5898 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5899#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5900 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5901#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5902 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5903#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5904 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5905#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5906 ARG6) \
5907 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5908#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5909 ARG6, ARG7) \
5910 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
acf0174b
JJ
5911#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5912 ARG6, ARG7, ARG8) \
5913 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5914 ARG7, ARG8);
a1ab4c31
AC
5915#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5916 def_fn_type (ENUM, RETURN, 1, 0);
5917#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5918 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5919#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5920 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5921#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5922 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5923#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5924 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5925#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5926 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5927#define DEF_POINTER_TYPE(ENUM, TYPE) \
5928 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5929
5930#include "builtin-types.def"
5931
5932#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
5933#undef DEF_FUNCTION_TYPE_1
5934#undef DEF_FUNCTION_TYPE_2
5935#undef DEF_FUNCTION_TYPE_3
5936#undef DEF_FUNCTION_TYPE_4
5937#undef DEF_FUNCTION_TYPE_5
5938#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
5939#undef DEF_FUNCTION_TYPE_VAR_0
5940#undef DEF_FUNCTION_TYPE_VAR_1
5941#undef DEF_FUNCTION_TYPE_VAR_2
5942#undef DEF_FUNCTION_TYPE_VAR_3
5943#undef DEF_FUNCTION_TYPE_VAR_4
5944#undef DEF_FUNCTION_TYPE_VAR_5
5945#undef DEF_POINTER_TYPE
5946 builtin_types[(int) BT_LAST] = NULL_TREE;
5947}
5948
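/* Illustrative sketch, not part of the original utils.c: with the macros
   above in effect, a builtin-types.def entry such as

     DEF_FUNCTION_TYPE_2 (BT_FN_INT_PTR_PTR, BT_INT, BT_PTR, BT_PTR)

   (enumerator names shown for illustration) simply expands to

     def_fn_type (BT_FN_INT_PTR_PTR, BT_INT, 0, 2, BT_PTR, BT_PTR);

   i.e. the type of "int f (void *, void *)".  */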
5949/* ----------------------------------------------------------------------- *
5950 * BUILTIN ATTRIBUTES *
5951 * ----------------------------------------------------------------------- */
5952
5953enum built_in_attribute
5954{
5955#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5956#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5957#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
a1ab4c31
AC
5958#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5959#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5960#include "builtin-attrs.def"
5961#undef DEF_ATTR_NULL_TREE
5962#undef DEF_ATTR_INT
e384e6b5 5963#undef DEF_ATTR_STRING
a1ab4c31
AC
5964#undef DEF_ATTR_IDENT
5965#undef DEF_ATTR_TREE_LIST
5966 ATTR_LAST
5967};
5968
5969static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5970
5971static void
5972install_builtin_attributes (void)
5973{
5974 /* Fill in the built_in_attributes array. */
5975#define DEF_ATTR_NULL_TREE(ENUM) \
5976 built_in_attributes[(int) ENUM] = NULL_TREE;
5977#define DEF_ATTR_INT(ENUM, VALUE) \
5978 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
e384e6b5
BS
5979#define DEF_ATTR_STRING(ENUM, VALUE) \
5980 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
a1ab4c31
AC
5981#define DEF_ATTR_IDENT(ENUM, STRING) \
5982 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5983#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5984 built_in_attributes[(int) ENUM] \
5985 = tree_cons (built_in_attributes[(int) PURPOSE], \
5986 built_in_attributes[(int) VALUE], \
5987 built_in_attributes[(int) CHAIN]);
5988#include "builtin-attrs.def"
5989#undef DEF_ATTR_NULL_TREE
5990#undef DEF_ATTR_INT
e384e6b5 5991#undef DEF_ATTR_STRING
a1ab4c31
AC
5992#undef DEF_ATTR_IDENT
5993#undef DEF_ATTR_TREE_LIST
5994}
5995
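/* Illustrative sketch, not part of the original utils.c: under the macros
   above, builtin-attrs.def entries along the lines of

     DEF_ATTR_IDENT (ATTR_CONST, "const")
     DEF_ATTR_TREE_LIST (ATTR_CONST_NOTHROW_LIST, ATTR_CONST,
                         ATTR_NULL, ATTR_NOTHROW_LIST)

   (names shown for illustration) become an identifier node and a tree_cons
   chain respectively, ready to be attached to builtin decls by
   install_builtin_functions below.  */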
5996/* Handle a "const" attribute; arguments as in
5997 struct attribute_spec.handler. */
5998
5999static tree
6000handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6001 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6002 bool *no_add_attrs)
6003{
6004 if (TREE_CODE (*node) == FUNCTION_DECL)
6005 TREE_READONLY (*node) = 1;
6006 else
6007 *no_add_attrs = true;
6008
6009 return NULL_TREE;
6010}
6011
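/* Illustrative sketch, not part of the original utils.c: in GNAT this
   handler is typically reached through pragma Machine_Attribute, e.g.

     function Sq (X : Integer) return Integer;
     pragma Machine_Attribute (Sq, "const");

   which corresponds to the C declaration

     extern int sq (int) __attribute__ ((const));

   Setting TREE_READONLY on a FUNCTION_DECL is how the middle end records
   that the function has no side effects and reads no global memory.  */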
6012/* Handle a "nothrow" attribute; arguments as in
6013 struct attribute_spec.handler. */
6014
6015static tree
6016handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6017 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6018 bool *no_add_attrs)
6019{
6020 if (TREE_CODE (*node) == FUNCTION_DECL)
6021 TREE_NOTHROW (*node) = 1;
6022 else
6023 *no_add_attrs = true;
6024
6025 return NULL_TREE;
6026}
6027
6028/* Handle a "pure" attribute; arguments as in
6029 struct attribute_spec.handler. */
6030
6031static tree
6032handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6033 int ARG_UNUSED (flags), bool *no_add_attrs)
6034{
6035 if (TREE_CODE (*node) == FUNCTION_DECL)
6036 DECL_PURE_P (*node) = 1;
6037 /* ??? TODO: Support types. */
6038 else
6039 {
7948ae37
OH
6040 warning (OPT_Wattributes, "%qs attribute ignored",
6041 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6042 *no_add_attrs = true;
6043 }
6044
6045 return NULL_TREE;
6046}
6047
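/* Illustrative sketch, not part of the original utils.c: "pure" is weaker
   than "const": a pure function may read global memory but still has no
   side effects, e.g.

     extern int table_lookup (int) __attribute__ ((pure));

   DECL_PURE_P is the middle-end flag that allows such calls to be
   subjected to common subexpression elimination.  */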
6048/* Handle a "no vops" attribute; arguments as in
6049 struct attribute_spec.handler. */
6050
6051static tree
6052handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6053 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6054 bool *ARG_UNUSED (no_add_attrs))
6055{
6056 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6057 DECL_IS_NOVOPS (*node) = 1;
6058 return NULL_TREE;
6059}
6060
6061/* Helper for nonnull attribute handling; fetch the operand number
6062 from the attribute argument list. */
6063
6064static bool
6065get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6066{
6067 /* Verify the arg number is a constant. */
6068 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6069 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6070 return false;
6071
6072 *valp = TREE_INT_CST_LOW (arg_num_expr);
6073 return true;
6074}
6075
6076/* Handle the "nonnull" attribute. */
6077static tree
6078handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6079 tree args, int ARG_UNUSED (flags),
6080 bool *no_add_attrs)
6081{
6082 tree type = *node;
6083 unsigned HOST_WIDE_INT attr_arg_num;
6084
6085 /* If no arguments are specified, all pointer arguments should be
6086 non-null. Verify a full prototype is given so that the arguments
6087 will have the correct types when we actually check them later. */
6088 if (!args)
6089 {
f4da8dce 6090 if (!prototype_p (type))
a1ab4c31
AC
6091 {
6092 error ("nonnull attribute without arguments on a non-prototype");
6093 *no_add_attrs = true;
6094 }
6095 return NULL_TREE;
6096 }
6097
6098 /* Argument list specified. Verify that each argument number references
6099 a pointer argument. */
6100 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6101 {
a1ab4c31
AC
6102 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6103
6104 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6105 {
6106 error ("nonnull argument has invalid operand number (argument %lu)",
6107 (unsigned long) attr_arg_num);
6108 *no_add_attrs = true;
6109 return NULL_TREE;
6110 }
6111
d7d058c5 6112 if (prototype_p (type))
a1ab4c31 6113 {
d7d058c5
NF
6114 function_args_iterator iter;
6115 tree argument;
6116
6117 function_args_iter_init (&iter, type);
6118 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 6119 {
d7d058c5 6120 argument = function_args_iter_cond (&iter);
a1ab4c31
AC
6121 if (!argument || ck_num == arg_num)
6122 break;
a1ab4c31
AC
6123 }
6124
6125 if (!argument
d7d058c5 6126 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 6127 {
58c8f770
EB
6128 error ("nonnull argument with out-of-range operand number "
6129 "(argument %lu, operand %lu)",
a1ab4c31
AC
6130 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6131 *no_add_attrs = true;
6132 return NULL_TREE;
6133 }
6134
d7d058c5 6135 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 6136 {
58c8f770
EB
6137 error ("nonnull argument references non-pointer operand "
6138 "(argument %lu, operand %lu)",
a1ab4c31
AC
6139 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6140 *no_add_attrs = true;
6141 return NULL_TREE;
6142 }
6143 }
6144 }
6145
6146 return NULL_TREE;
6147}
6148
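/* Illustrative sketch, not part of the original utils.c: the checks above
   accept forms such as

     extern char *my_copy (char *, const char *)
       __attribute__ ((nonnull (1, 2)));
     extern char *my_copy_all (char *, const char *)
       __attribute__ ((nonnull));

   where argument numbers are 1-based and must designate pointer
   parameters of a prototyped function.  */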
6149/* Handle a "sentinel" attribute. */
6150
6151static tree
6152handle_sentinel_attribute (tree *node, tree name, tree args,
6153 int ARG_UNUSED (flags), bool *no_add_attrs)
6154{
f4da8dce 6155 if (!prototype_p (*node))
a1ab4c31
AC
6156 {
6157 warning (OPT_Wattributes,
7948ae37
OH
6158 "%qs attribute requires prototypes with named arguments",
6159 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6160 *no_add_attrs = true;
6161 }
6162 else
6163 {
dcf0c47e 6164 if (!stdarg_p (*node))
a1ab4c31
AC
6165 {
6166 warning (OPT_Wattributes,
7948ae37
OH
6167 "%qs attribute only applies to variadic functions",
6168 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6169 *no_add_attrs = true;
6170 }
6171 }
6172
6173 if (args)
6174 {
6175 tree position = TREE_VALUE (args);
6176
6177 if (TREE_CODE (position) != INTEGER_CST)
6178 {
6179 warning (0, "requested position is not an integer constant");
6180 *no_add_attrs = true;
6181 }
6182 else
6183 {
6184 if (tree_int_cst_lt (position, integer_zero_node))
6185 {
6186 warning (0, "requested position is less than zero");
6187 *no_add_attrs = true;
6188 }
6189 }
6190 }
6191
6192 return NULL_TREE;
6193}
6194
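/* Illustrative sketch, not part of the original utils.c: "sentinel" only
   makes sense on variadic prototypes and asserts that the variable
   argument list is terminated by a null pointer, e.g.

     extern void build_list (const char *first, ...)
       __attribute__ ((sentinel));

   An optional position argument counts backwards from the end of the
   argument list.  */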
6195/* Handle a "noreturn" attribute; arguments as in
6196 struct attribute_spec.handler. */
6197
6198static tree
6199handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6200 int ARG_UNUSED (flags), bool *no_add_attrs)
6201{
6202 tree type = TREE_TYPE (*node);
6203
6204 /* See FIXME comment in c_common_attribute_table. */
6205 if (TREE_CODE (*node) == FUNCTION_DECL)
6206 TREE_THIS_VOLATILE (*node) = 1;
6207 else if (TREE_CODE (type) == POINTER_TYPE
6208 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6209 TREE_TYPE (*node)
6210 = build_pointer_type
6211 (build_type_variant (TREE_TYPE (type),
6212 TYPE_READONLY (TREE_TYPE (type)), 1));
6213 else
6214 {
7948ae37
OH
6215 warning (OPT_Wattributes, "%qs attribute ignored",
6216 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6217 *no_add_attrs = true;
6218 }
6219
6220 return NULL_TREE;
6221}
6222
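/* Illustrative sketch, not part of the original utils.c: e.g.

     extern void fatal_error (const char *) __attribute__ ((noreturn));

   On a FUNCTION_DECL the property is recorded by reusing
   TREE_THIS_VOLATILE, as done above; on a pointer-to-function type it is
   encoded as a "volatile" function type variant.  */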
0d6e14fd
JH
6223/* Handle a "leaf" attribute; arguments as in
6224 struct attribute_spec.handler. */
6225
6226static tree
6227handle_leaf_attribute (tree *node, tree name,
6228 tree ARG_UNUSED (args),
6229 int ARG_UNUSED (flags), bool *no_add_attrs)
6230{
6231 if (TREE_CODE (*node) != FUNCTION_DECL)
6232 {
6233 warning (OPT_Wattributes, "%qE attribute ignored", name);
6234 *no_add_attrs = true;
6235 }
6236 if (!TREE_PUBLIC (*node))
6237 {
32a5388a 6238 warning (OPT_Wattributes, "%qE attribute has no effect", name);
0d6e14fd
JH
6239 *no_add_attrs = true;
6240 }
6241
6242 return NULL_TREE;
6243}
6244
a1ab4c31
AC
6245/* Handle a "malloc" attribute; arguments as in
6246 struct attribute_spec.handler. */
6247
6248static tree
6249handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6250 int ARG_UNUSED (flags), bool *no_add_attrs)
6251{
6252 if (TREE_CODE (*node) == FUNCTION_DECL
6253 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6254 DECL_IS_MALLOC (*node) = 1;
6255 else
6256 {
7948ae37
OH
6257 warning (OPT_Wattributes, "%qs attribute ignored",
6258 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6259 *no_add_attrs = true;
6260 }
6261
6262 return NULL_TREE;
6263}
6264
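/* Illustrative sketch, not part of the original utils.c: e.g.

     extern void *my_alloc (size_t) __attribute__ ((malloc));

   DECL_IS_MALLOC tells the aliasing machinery that the returned pointer
   does not alias any other object live at the call site, which is why the
   handler above insists on a pointer return type.  */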
6265/* Fake handler for attributes we don't properly support. */
6266
6267tree
6268fake_attribute_handler (tree * ARG_UNUSED (node),
6269 tree ARG_UNUSED (name),
6270 tree ARG_UNUSED (args),
6271 int ARG_UNUSED (flags),
6272 bool * ARG_UNUSED (no_add_attrs))
6273{
6274 return NULL_TREE;
6275}
6276
6277/* Handle a "type_generic" attribute. */
6278
6279static tree
6280handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6281 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6282 bool * ARG_UNUSED (no_add_attrs))
6283{
a1ab4c31
AC
6284 /* Ensure we have a function type. */
6285 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 6286
a1ab4c31 6287 /* Ensure we have a variadic function. */
dcf0c47e 6288 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
a1ab4c31
AC
6289
6290 return NULL_TREE;
6291}
6292
2724e58f
OH
6293/* Handle a "vector_size" attribute; arguments as in
6294 struct attribute_spec.handler. */
6295
6296static tree
6297handle_vector_size_attribute (tree *node, tree name, tree args,
6298 int ARG_UNUSED (flags),
6299 bool *no_add_attrs)
6300{
6301 unsigned HOST_WIDE_INT vecsize, nunits;
6302 enum machine_mode orig_mode;
6303 tree type = *node, new_type, size;
6304
6305 *no_add_attrs = true;
6306
6307 size = TREE_VALUE (args);
6308
cc269bb6 6309 if (!tree_fits_uhwi_p (size))
2724e58f 6310 {
7948ae37
OH
6311 warning (OPT_Wattributes, "%qs attribute ignored",
6312 IDENTIFIER_POINTER (name));
2724e58f
OH
6313 return NULL_TREE;
6314 }
6315
6316 /* Get the vector size (in bytes). */
ae7e9ddd 6317 vecsize = tree_to_uhwi (size);
2724e58f
OH
6318
6319 /* We need to provide for vector pointers, vector arrays, and
6320 functions returning vectors. For example:
6321
6322 __attribute__((vector_size(16))) short *foo;
6323
6324 In this case, the mode is SI, but the type being modified is
6325 HI, so we need to look further. */
6326
6327 while (POINTER_TYPE_P (type)
6328 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6329 || TREE_CODE (type) == ARRAY_TYPE)
2724e58f
OH
6330 type = TREE_TYPE (type);
6331
6332 /* Get the mode of the type being modified. */
6333 orig_mode = TYPE_MODE (type);
6334
6335 if ((!INTEGRAL_TYPE_P (type)
6336 && !SCALAR_FLOAT_TYPE_P (type)
6337 && !FIXED_POINT_TYPE_P (type))
6338 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6339 && GET_MODE_CLASS (orig_mode) != MODE_INT
6340 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
cc269bb6 6341 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
2724e58f
OH
6342 || TREE_CODE (type) == BOOLEAN_TYPE)
6343 {
7948ae37
OH
6344 error ("invalid vector type for attribute %qs",
6345 IDENTIFIER_POINTER (name));
2724e58f
OH
6346 return NULL_TREE;
6347 }
6348
ae7e9ddd 6349 if (vecsize % tree_to_uhwi (TYPE_SIZE_UNIT (type)))
2724e58f
OH
6350 {
6351 error ("vector size not an integral multiple of component size");
6352 return NULL;
6353 }
6354
6355 if (vecsize == 0)
6356 {
6357 error ("zero vector size");
6358 return NULL;
6359 }
6360
6361 /* Calculate how many units fit in the vector. */
ae7e9ddd 6362 nunits = vecsize / tree_to_uhwi (TYPE_SIZE_UNIT (type));
2724e58f
OH
6363 if (nunits & (nunits - 1))
6364 {
6365 error ("number of components of the vector not a power of two");
6366 return NULL_TREE;
6367 }
6368
6369 new_type = build_vector_type (type, nunits);
6370
6371 /* Build back pointers if needed. */
9b469089 6372 *node = reconstruct_complex_type (*node, new_type);
2724e58f
OH
6373
6374 return NULL_TREE;
6375}
6376
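/* Illustrative sketch, not part of the original utils.c: the classic use
   at the C level is

     typedef int v4si __attribute__ ((vector_size (16)));

   which, on a target with 4-byte int, yields a vector of 4 units, provided
   the requested size is a multiple of the element size and the resulting
   unit count is a power of two, exactly as checked above.  */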
7948ae37
OH
6377/* Handle a "vector_type" attribute; arguments as in
6378 struct attribute_spec.handler. */
6379
6380static tree
6381handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6382 int ARG_UNUSED (flags),
6383 bool *no_add_attrs)
6384{
6385 /* Vector representative type and size. */
6386 tree rep_type = *node;
6387 tree rep_size = TYPE_SIZE_UNIT (rep_type);
7948ae37
OH
6388
6389 /* Vector size in bytes and number of units. */
6390 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6391
6392 /* Vector element type and mode. */
6393 tree elem_type;
6394 enum machine_mode elem_mode;
6395
6396 *no_add_attrs = true;
6397
7948ae37
OH
6398 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6399 {
6400 error ("attribute %qs applies to array types only",
6401 IDENTIFIER_POINTER (name));
6402 return NULL_TREE;
6403 }
6404
6405 /* Silently punt on variable sizes: we cannot make vector types for them,
6406 we need to ignore them on front-end generated subtypes of unconstrained
6407 bases, and this attribute is for binding implementors, not end-users, so
6408 we should never get here from legitimate explicit uses. */
6409
cc269bb6 6410 if (!tree_fits_uhwi_p (rep_size))
7948ae37
OH
6411 return NULL_TREE;
6412
6413 /* Get the element type/mode and check this is something we know
6414 how to make vectors of. */
6415
6416 elem_type = TREE_TYPE (rep_type);
6417 elem_mode = TYPE_MODE (elem_type);
6418
6419 if ((!INTEGRAL_TYPE_P (elem_type)
6420 && !SCALAR_FLOAT_TYPE_P (elem_type)
6421 && !FIXED_POINT_TYPE_P (elem_type))
6422 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6423 && GET_MODE_CLASS (elem_mode) != MODE_INT
6424 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
cc269bb6 6425 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (elem_type)))
7948ae37
OH
6426 {
6427 error ("invalid element type for attribute %qs",
6428 IDENTIFIER_POINTER (name));
6429 return NULL_TREE;
6430 }
6431
6432 /* Sanity check the vector size and element type consistency. */
6433
ae7e9ddd 6434 vec_bytes = tree_to_uhwi (rep_size);
7948ae37 6435
ae7e9ddd 6436 if (vec_bytes % tree_to_uhwi (TYPE_SIZE_UNIT (elem_type)))
7948ae37
OH
6437 {
6438 error ("vector size not an integral multiple of component size");
6439 return NULL;
6440 }
6441
6442 if (vec_bytes == 0)
6443 {
6444 error ("zero vector size");
6445 return NULL;
6446 }
6447
ae7e9ddd 6448 vec_units = vec_bytes / tree_to_uhwi (TYPE_SIZE_UNIT (elem_type));
7948ae37
OH
6449 if (vec_units & (vec_units - 1))
6450 {
6451 error ("number of components of the vector not a power of two");
6452 return NULL_TREE;
6453 }
6454
6455 /* Build the vector type and replace. */
6456
6457 *node = build_vector_type (elem_type, vec_units);
7948ae37
OH
6458 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6459
6460 return NULL_TREE;
6461}
6462
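/* Illustrative sketch, not part of the original utils.c: this attribute is
   meant for binding implementors rather than end-users; applied (e.g. via
   pragma Machine_Attribute) to a constrained array type such as

     type V4SI_Array is array (1 .. 4) of Integer;

   it turns the type into the corresponding 4-unit vector type while
   keeping the original array reachable through
   TYPE_REPRESENTATIVE_ARRAY.  */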
a1ab4c31
AC
6463/* ----------------------------------------------------------------------- *
6464 * BUILTIN FUNCTIONS *
6465 * ----------------------------------------------------------------------- */
6466
6467/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6468 names. NONANSI_P is accepted for symmetry with the C front end but is
6469 unused here, since the -fno-builtin family of options does not exist for Ada. */
6470
6471static void
6472def_builtin_1 (enum built_in_function fncode,
6473 const char *name,
6474 enum built_in_class fnclass,
6475 tree fntype, tree libtype,
6476 bool both_p, bool fallback_p,
6477 bool nonansi_p ATTRIBUTE_UNUSED,
6478 tree fnattrs, bool implicit_p)
6479{
6480 tree decl;
6481 const char *libname;
6482
6483 /* Preserve an already installed decl. It most likely was setup in advance
6484 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 6485 if (builtin_decl_explicit (fncode) != NULL_TREE)
a1ab4c31
AC
6486 return;
6487
6488 gcc_assert ((!both_p && !fallback_p)
6489 || !strncmp (name, "__builtin_",
6490 strlen ("__builtin_")));
6491
6492 libname = name + strlen ("__builtin_");
6493 decl = add_builtin_function (name, fntype, fncode, fnclass,
6494 (fallback_p ? libname : NULL),
6495 fnattrs);
6496 if (both_p)
6497 /* ??? This is normally further controlled by command-line options
6498 like -fno-builtin, but we don't have them for Ada. */
6499 add_builtin_function (libname, libtype, fncode, fnclass,
6500 NULL, fnattrs);
6501
e79983f4 6502 set_builtin_decl (fncode, decl, implicit_p);
a1ab4c31
AC
6503}
6504
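/* Illustrative sketch, not part of the original utils.c: for a builtin
   known under two names, e.g. memcpy, def_builtin_1 is called with the
   name "__builtin_memcpy" and BOTH_P set, so that both the __builtin_
   form and the plain "memcpy" fallback are declared, and the decl for the
   __builtin_ form is recorded with set_builtin_decl.  */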
6505static int flag_isoc94 = 0;
6506static int flag_isoc99 = 0;
6507
6508/* Install what the common builtins.def offers. */
6509
6510static void
6511install_builtin_functions (void)
6512{
6513#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6514 NONANSI_P, ATTRS, IMPLICIT, COND) \
6515 if (NAME && COND) \
6516 def_builtin_1 (ENUM, NAME, CLASS, \
6517 builtin_types[(int) TYPE], \
6518 builtin_types[(int) LIBTYPE], \
6519 BOTH_P, FALLBACK_P, NONANSI_P, \
6520 built_in_attributes[(int) ATTRS], IMPLICIT);
6521#include "builtins.def"
6522#undef DEF_BUILTIN
6523}
6524
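/* Illustrative sketch, not part of the original utils.c: a builtins.def
   entry such as

     DEF_LIB_BUILTIN (BUILT_IN_MALLOC, "malloc", BT_FN_PTR_SIZE,
                      ATTR_MALLOC_NOTHROW_LIST)

   (shown schematically; the real entries go through wrapper macros that
   supply the remaining DEF_BUILTIN arguments) ends up calling
   def_builtin_1 with the function type and attribute list built by the
   install_* routines above.  */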
6525/* ----------------------------------------------------------------------- *
6526 * BUILTIN FUNCTIONS *
6527 * ----------------------------------------------------------------------- */
6528
6529/* Install the builtin functions we might need. */
6530
6531void
6532gnat_install_builtins (void)
6533{
6534 install_builtin_elementary_types ();
6535 install_builtin_function_types ();
6536 install_builtin_attributes ();
6537
6538 /* Install builtins used by generic middle-end pieces first. Some of these
6539 know about internal specificities and control attributes accordingly, for
6540 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6541 the generic definition from builtins.def. */
384c400a 6542 build_common_builtin_nodes ();
a1ab4c31
AC
6543
6544 /* Now, install the target specific builtins, such as the AltiVec family on
6545 ppc, and the common set as exposed by builtins.def. */
6546 targetm.init_builtins ();
6547 install_builtin_functions ();
6548}
6549
6550#include "gt-ada-utils.h"
6551#include "gtype-ada.h"